
Feat/dss cimprove master rebasing

Closed: Andrey Samartsev requested to merge feat/DSSCimproveMasterRebasing into feat/DSSCdarksImprove
2 files changed: +247 -65
@@ -8,7 +8,7 @@
"\n",
"Author: S. Hauf, Version: 0.1\n",
"\n",
"The following code analyzes a set of dark images taken with the DSSC detector to deduce detector offsets and noise. Data for the detector is presented in one run and don't acquire multiple gain stages.\n",
"The following code analyzes a set of dark images taken with the DSSC detector to deduce detector offsets and noise. Data for the detector is presented in one run and don't acquire multiple gain stages. \n",
"\n",
"The notebook explicitely does what pyDetLib provides in its offset calculation method for streaming data."
]
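For orientation, here is a minimal, self-contained sketch of the dark characterization performed below: per memory cell, the offset is the mean (or median, depending on offset_numpy_algorithm) over the dark frames and the noise is the per-pixel standard deviation. Shapes and values are invented for illustration and do not reflect the DSSC geometry.

import numpy as np

# Hypothetical dark data: 50 frames of 128x512 pixels for a single memory cell.
rng = np.random.default_rng(0)
im = rng.normal(loc=40.0, scale=1.5, size=(128, 512, 50))

offset = np.mean(im, axis=2)   # per-pixel baseline; np.median is the robust alternative
noise = np.std(im, axis=2)     # per-pixel spread around that baseline

print(offset.shape, noise.shape)  # (128, 512) (128, 512)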
@@ -20,34 +20,47 @@
"ExecuteTime": {
"end_time": "2019-02-20T12:42:51.255184Z",
"start_time": "2019-02-20T12:42:51.225500Z"
}
},
"collapsed": true
},
"outputs": [],
"source": [
"cluster_profile = \"noDB\" # The ipcluster profile to use\n",
"in_folder = \"/gpfs/exfel/exp/SCS/201931/p900095/raw\" # path to input data, required\n",
"out_folder = \"/gpfs/exfel/data/scratch/ahmedk/test/DSSC\" # path to output to, required\n",
"in_folder = \"/gpfs/exfel/exp/SCS/202030/p900125/raw\" # path to input data, required\n",
"out_folder = \"/gpfs/exfel/data/scratch/samartse/test/\" # path to output to, required\n",
"sequences = [0] # sequence files to evaluate.\n",
"modules = [-1] # modules to run for\n",
"run = 136 # run numbr in which data was recorded, required\n",
"\n",
"run = 1497 # run number in which data was recorded, required\n",
"karabo_id = \"SCS_DET_DSSC1M-1\" # karabo karabo_id\n",
"karabo_da = ['-1'] # a list of data aggregators names, Default [-1] for selecting all data aggregators\n",
"receiver_id = \"{}CH0\" # inset for receiver devices\n",
"path_template = 'RAW-R{:04d}-{}-S{:05d}.h5' # the template to use to access data\n",
"h5path = '/INSTRUMENT/{}/DET/{}:xtdf/image' # path in the HDF5 file to images\n",
"h5path_idx = '/INDEX/{}/DET/{}:xtdf/image' # path in the HDF5 file to images\n",
"\n",
"mem_cells = 0 # number of memory cells used, set to 0 to automatically infer\n",
"local_output = True # output constants locally\n",
"db_output = False # output constants to database\n",
"bias_voltage = 300 # detector bias voltage\n",
"use_dir_creation_date = True # use the dir creation date for determining the creation time\n",
"cal_db_interface = \"tcp://max-exfl016:8020\" # the database interface to use\n",
"cal_db_timeout = 3000000 # timeout on caldb requests\"\n",
"local_output = True # output constants locally\n",
"db_output = True # output constants to database\n",
"\n",
"mem_cells = 0 # number of memory cells used, set to 0 to automatically infer\n",
"bias_voltage = 100 # detector bias voltage\n",
"rawversion = 2 # RAW file format version\n",
"dont_use_dir_date = True # don't use the dir creation date for determining the creation time\n",
"\n",
"thresholds_offset_sigma = 3. # thresholds in terms of n sigma noise for offset deduced bad pixels\n",
"thresholds_offset_hard = [4000, 8500] # thresholds in absolute ADU terms for offset deduced bad pixels\n",
"thresholds_offset_hard = [4, 125] # thresholds in absolute ADU terms for offset deduced bad pixels\n",
"\n",
"thresholds_noise_sigma = 5. # thresholds in terms of n sigma noise for offset deduced bad pixels\n",
"thresholds_noise_hard = [4, 20] # thresholds in absolute ADU terms for offset deduced bad pixels\n",
"thresholds_noise_hard = [0.1, 10] # thresholds in absolute ADU terms for offset deduced bad pixels\n",
"offset_numpy_algorithm = \"mean\"\n",
"\n",
"instrument = \"SCS\" # the instrument\n",
"high_res_badpix_3d = False # set this to True if you need high-resolution 3d bad pixel plots. Runtime: ~ 1h\n",
"modules = [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15] # modules to run for"
"modules = [i for i in range(16)] # modules to run for\n",
"slow_data_pattern = 'RAW-R{}-DA{}-S[0-9]+\\.h5'\n",
"slow_data_aggregators = {1:1,2:2,3:3,4:4} #quadrant/aggregator\n"
]
},
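To make the slow_data_pattern and slow_data_aggregators parameters concrete, here is a small illustration of how they select control files later in the notebook (file names here are invented):

import re

slow_data_pattern = r'RAW-R{}-DA{}-S[0-9]+\.h5'
run_number, aggregator = 1497, 1  # quadrant 1 maps to aggregator 1

pattern = slow_data_pattern.format('{:04d}'.format(run_number),
                                   '{:02d}'.format(aggregator))
print(bool(re.match(pattern, 'RAW-R1497-DA01-S00000.h5')))  # True
print(bool(re.match(pattern, 'RAW-R1497-DA02-S00000.h5')))  # False: wrong aggregator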
{
@@ -57,7 +70,8 @@
"ExecuteTime": {
"end_time": "2019-02-20T12:42:52.599660Z",
"start_time": "2019-02-20T12:42:51.472138Z"
}
},
"collapsed": false
},
"outputs": [],
"source": [
@@ -73,16 +87,23 @@
"matplotlib.use('agg')\n",
"import matplotlib.pyplot as plt\n",
"%matplotlib inline\n",
"import re\n",
"from os import listdir\n",
"\n",
"from cal_tools.tools import (gain_map_files, parse_runs, run_prop_seq_from_path, \n",
"from cal_tools.tools import (map_gain_stages, parse_runs, run_prop_seq_from_path, \n",
" get_notebook_name, get_dir_creation_date,\n",
" get_random_db_interface)\n",
" get_random_db_interface, get_from_db, save_const_to_h5)\n",
"\n",
"from cal_tools.influx import InfluxLogger\n",
"from cal_tools.enums import BadPixels\n",
"from cal_tools.plotting import show_overview, plot_badpix_3d, create_constant_overview\n",
"from cal_tools.plotting import (show_overview, plot_badpix_3d,\n",
" create_constant_overview,\n",
" show_processed_modules)\n",
"\n",
"# make sure a cluster is running with ipcluster start --n=32, give it a while to start\n",
"from ipyparallel import Client\n",
"from IPython.display import display, Markdown, Latex\n",
"import tabulate\n",
"\n",
"view = Client(profile=cluster_profile)[:]\n",
"view.use_dill()\n",
@@ -90,60 +111,39 @@
"from iCalibrationDB import ConstantMetaData, Constants, Conditions, Detectors, Versions\n",
"\n",
"\n",
"# no need to change this\n",
"\n",
"QUADRANTS = 4\n",
"MODULES_PER_QUAD = 4\n",
"DET_FILE_INSET = \"DSSC\"\n",
"h5path = h5path.format(karabo_id, receiver_id)\n",
"h5path_idx = h5path_idx.format(karabo_id, receiver_id)\n",
"gain_names = ['High', 'Medium', 'Low']\n",
"\n",
"if karabo_da[0] == '-1':\n",
" if modules[0] == -1:\n",
" modules = list(range(16))\n",
" karabo_da = [\"DSSC{:02d}\".format(i) for i in modules]\n",
"else:\n",
" modules = [int(x[-2:]) for x in karabo_da]\n",
" \n",
"max_cells = mem_cells\n",
" \n",
"offset_runs = OrderedDict()\n",
"offset_runs[\"high\"] = parse_runs(run)[0]\n",
"offset_runs[\"high\"] = run\n",
"\n",
"creation_time=None\n",
"if not dont_use_dir_date:\n",
"if use_dir_creation_date:\n",
" creation_time = get_dir_creation_date(in_folder, run)\n",
" print(f\"Using {creation_time} as creation time of constant.\")\n",
"\n",
"\n",
"run_number = run\n",
"run, prop, seq = run_prop_seq_from_path(in_folder)\n",
"logger = InfluxLogger(detector=\"DSSC\", instrument=instrument, mem_cells=mem_cells,\n",
" notebook=get_notebook_name(), proposal=prop)\n",
"\n",
"print(\"Using {} as creation time of constant.\".format(creation_time))\n",
"dinstance = \"DSSC1M1\"\n",
"\n",
"loc = None\n",
"if instrument == \"SCS\":\n",
" loc = \"SCS_DET_DSSC1M-1\"\n",
" dinstance = \"DSSC1M1\"\n",
"\n",
"print(\"Detector in use is {}\".format(loc)) \n",
"print(f\"Detector in use is {karabo_id}\") \n",
"\n",
"cal_db_interface = get_random_db_interface(cal_db_interface)"
]
},
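The karabo_da/modules handling in the cell above maps between module indices and data-aggregator names in both directions; a quick illustration with made-up selections:

modules = [0, 5, 15]
karabo_da = ["DSSC{:02d}".format(i) for i in modules]
print(karabo_da)                         # ['DSSC00', 'DSSC05', 'DSSC15']
print([int(x[-2:]) for x in karabo_da])  # back to [0, 5, 15]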
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"ExecuteTime": {
"end_time": "2019-02-20T12:42:52.608214Z",
"start_time": "2019-02-20T12:42:52.601257Z"
}
},
"outputs": [],
"source": [
"print(\"Parameters are:\")\n",
"print(\"Proposal: {}\".format(prop))\n",
"print(\"Memory cells: {}/{}\".format(mem_cells, max_cells))\n",
"print(\"Runs: {}\".format([ v for v in offset_runs.values()]))\n",
"print(\"Sequences: {}\".format(sequences))\n",
"print(\"Using DB: {}\".format(db_output))\n",
"print(\"Input: {}\".format(in_folder))\n",
"print(\"Output: {}\".format(out_folder))\n",
"print(\"Bias voltage: {}V\".format(bias_voltage))"
]
},
{
"cell_type": "markdown",
"metadata": {},
@@ -158,17 +158,85 @@
"ExecuteTime": {
"end_time": "2019-02-20T12:42:54.024731Z",
"start_time": "2019-02-20T12:42:53.901555Z"
}
},
"collapsed": false
},
"outputs": [],
"source": [
"# set everything up filewise\n",
"if not os.path.exists(out_folder):\n",
" os.makedirs(out_folder)\n",
"\n",
"gmf = gain_map_files(in_folder, offset_runs, sequences, DET_FILE_INSET, QUADRANTS, MODULES_PER_QUAD)\n",
"os.makedirs(out_folder, exist_ok=True)\n",
"gmf = map_gain_stages(in_folder, offset_runs, path_template, karabo_da, sequences)\n",
"gain_mapped_files, total_sequences, total_file_size = gmf\n",
"print(\"Will process at total of {} sequences: {:0.2f} GB of data.\".format(total_sequences, total_file_size))"
"print(f\"Will process a total of {total_sequences} file.\")\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"def getDSSCctrlData(in_folder): \n",
" \n",
" from hashlib import blake2b\n",
" import struct\n",
" import binascii\n",
" \n",
" in_folder = in_folder + \"/r{:04d}/\".format(offset_runs[\"high\"])\n",
" \n",
" ## returned dictionaries\n",
" resFullConfigs = {}\n",
" resOperatingFreqs = {}\n",
" for i in range(0,16):\n",
" qm = 'Q{}M{}'.format(i//4+1, i%4+1)\n",
" resFullConfigs[qm] = None\n",
" resOperatingFreqs[qm] = None\n",
" \n",
" \n",
" \n",
" ctrlDataFiles = {}\n",
" for quadrant, aggregator in slow_data_aggregators.items():\n",
" quad_sd_pattern = slow_data_pattern.format(\"{:04d}\".format(run_number), \"{:02d}\".format(aggregator))\n",
" ctrlDataFiles[quadrant] = [f for f in os.listdir(in_folder) if re.match(quad_sd_pattern, f)]\n",
" if not len(ctrlDataFiles):\n",
" print(\"No Control Slow Data found!\")\n",
" return\n",
" \n",
" ctrlloc = h5py.File(in_folder + next(iter( ctrlDataFiles.values() ))[0], 'r')['/METADATA/dataSources/deviceId'][0]\n",
" ctrlloc = ctrlloc.decode(\"utf-8\")\n",
" ctrlloc = ctrlloc[:ctrlloc.find('/')]\n",
" \n",
" fullConfigs = {}\n",
" operatingFreqs = {}\n",
" for quadrant, file in ctrlDataFiles.items():\n",
" if len(file):\n",
" fullConfig = h5py.File(in_folder + file[0])['/RUN/{}/FPGA/PPT_Q{}/fullConfigFileName/value'\\\n",
" .format(ctrlloc, quadrant)][0].decode(\"utf-8\")\n",
" fullConfigs[quadrant] = fullConfig[fullConfig.rfind('/')+1:] \n",
" \n",
" opFreq = h5py.File(in_folder + file[0])['/RUN/{}/FPGA/PPT_Q{}/sequencer/cycleLength/value'\\\n",
" .format(ctrlloc, quadrant)][0]\n",
" operatingFreqs[quadrant] = 4.5*(22.0/opFreq)\n",
" else:\n",
" print(\"no slow data for quadrant {} is found\".format(quadrant))\n",
" \n",
" for quadrant, value in fullConfigs.items():\n",
" for module in range(1,5):\n",
" qm = 'Q{}M{}'.format(quadrant, module) \n",
" encodedvalue = blake2b(value.encode(\"utf-8\"), digest_size=8)\n",
" resFullConfigs[qm] = struct.unpack('d', binascii.unhexlify(encodedvalue.hexdigest()))[0]\n",
" \n",
" for quadrant, value in operatingFreqs.items():\n",
" for module in range(1,5):\n",
" qm = 'Q{}M{}'.format(quadrant, module)\n",
" resOperatingFreqs[qm] = value\n",
"\n",
" return resFullConfigs, resOperatingFreqs\n",
" \n",
"\n",
" \n"
]
},
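Two details of getDSSCctrlData are worth spelling out. The full-config file name is hashed with BLAKE2b to an 8-byte digest and reinterpreted as a 64-bit float, so it can be stored as a numeric operating condition; and the operating frequency follows from the sequencer cycle length, with 22 cycles corresponding to the nominal 4.5 (MHz). A self-contained sketch with an invented config name:

import binascii
import struct
from hashlib import blake2b

config_name = 'dssc_example_full_config.conf'  # hypothetical
digest = blake2b(config_name.encode('utf-8'), digest_size=8)  # 8 bytes = one double
checksum = struct.unpack('d', binascii.unhexlify(digest.hexdigest()))[0]
print(checksum)  # deterministic float fingerprint of the config name

cycle_length = 44                   # e.g. read from .../sequencer/cycleLength/value
print(4.5 * (22.0 / cycle_length))  # 2.25: half the nominal operating frequency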
{
@@ -187,13 +255,14 @@
"ExecuteTime": {
"end_time": "2019-02-20T10:50:55.839958Z",
"start_time": "2019-02-20T10:50:55.468134Z"
}
},
"collapsed": false
},
"outputs": [],
"source": [
"import copy\n",
"from functools import partial\n",
"def characterize_module(cells, bp_thresh, rawversion, loc, inp):\n",
"def characterize_module(cells, bp_thresh, rawversion, karabo_id, h5path, h5path_idx, inp):\n",
" import numpy as np\n",
" import copy\n",
" import h5py\n",
@@ -203,45 +272,49 @@
" import struct\n",
" import binascii\n",
" \n",
" def get_num_cells(fname, loc, module):\n",
" def get_num_cells(fname, h5path):\n",
" with h5py.File(fname, \"r\") as f:\n",
"\n",
" cells = f[\"INSTRUMENT/{}/DET/{}CH0:xtdf/image/cellId\".format(loc, module)][()]\n",
" cells = f[f\"{h5path}/cellId\"][()]\n",
" maxcell = np.max(cells)\n",
" options = [100, 200, 400, 500, 600, 700, 800]\n",
" dists = np.array([(o-maxcell) for o in options])\n",
" dists[dists<0] = 10000 # assure to always go higher\n",
" return options[np.argmin(dists)]\n",
" \n",
" filename, filename_out, channel = inp\n",
" filename, channel = inp\n",
" \n",
" h5path = h5path.format(channel)\n",
" h5path_idx = h5path_idx.format(channel)\n",
" \n",
"\n",
" if cells == 0:\n",
" cells = get_num_cells(filename, loc, channel)\n",
" cells = get_num_cells(filename, h5path)\n",
"\n",
" print(f\"Using {cells} memory cells\")\n",
" \n",
" pulseid_checksum = None\n",
"\n",
" thresholds_offset_hard, thresholds_offset_sigma, thresholds_noise_hard, thresholds_noise_sigma = bp_thresh \n",
"\n",
" infile = h5py.File(filename, \"r\", driver=\"core\")\n",
" if rawversion == 2:\n",
" count = np.squeeze(infile[\"/INDEX/{}/DET/{}CH0:xtdf/image/count\".format(loc, channel)])\n",
" first = np.squeeze(infile[\"/INDEX/{}/DET/{}CH0:xtdf/image/first\".format(loc, channel)])\n",
" count = np.squeeze(infile[f\"{h5path_idx}/count\"])\n",
" first = np.squeeze(infile[f\"{h5path_idx}/first\"])\n",
" last_index = int(first[count != 0][-1]+count[count != 0][-1])\n",
" first_index = int(first[count != 0][0])\n",
" pulseids = infile[\"INSTRUMENT/{}/DET/{}CH0:xtdf/image/pulseId\".format(loc, channel)][first_index:int(first[count != 0][1])]\n",
" pulseids = infile[f\"{h5path}/pulseId\"][first_index:int(first[count != 0][1])]\n",
" bveto = blake2b(pulseids.data, digest_size=8)\n",
" pulseid_checksum = struct.unpack('d', binascii.unhexlify(bveto.hexdigest()))[0]\n",
" else:\n",
" status = np.squeeze(infile[\"/INDEX/{}/DET/{}CH0:xtdf/image/status\".format(loc, channel)])\n",
" status = np.squeeze(infile[f\"{h5path_idx}/status\"])\n",
" if np.count_nonzero(status != 0) == 0:\n",
" return\n",
" last = np.squeeze(infile[\"/INDEX/{}/DET/{}CH0:xtdf/image/last\".format(loc, channel)])\n",
" first = np.squeeze(infile[\"/INDEX/{}/DET/{}CH0:xtdf/image/first\".format(loc, channel)])\n",
" last = np.squeeze(infile[f\"{h5path_idx}/last\"])\n",
" first = np.squeeze(infile[f\"{h5path_idx}/first\"])\n",
" last_index = int(last[status != 0][-1]) + 1\n",
" first_index = int(first[status != 0][0])\n",
" im = np.array(infile[\"/INSTRUMENT/{}/DET/{}CH0:xtdf/image/data\".format(loc, channel)][first_index:last_index,...]) \n",
" cellIds = np.squeeze(infile[\"/INSTRUMENT/{}/DET/{}CH0:xtdf/image/cellId\".format(loc, channel)][first_index:last_index,...]) \n",
" im = np.array(infile[f\"{h5path}/data\"][first_index:last_index,...]) \n",
" cellIds = np.squeeze(infile[f\"{h5path}/cellId\"][first_index:last_index,...]) \n",
" \n",
" infile.close()\n",
"\n",
@@ -257,7 +330,10 @@
" \n",
" for cc in np.unique(cellIds[cellIds < mcells]):\n",
" cellidx = cellIds == cc\n",
" offset[...,cc] = np.median(im[..., cellidx], axis=2)\n",
" if offset_numpy_algorithm == \"mean\":\n",
" offset[...,cc] = np.mean(im[..., cellidx], axis=2)\n",
" else:\n",
" offset[...,cc] = np.median(im[..., cellidx], axis=2)\n",
" noise[...,cc] = np.std(im[..., cellidx], axis=2)\n",
" \n",
" \n",
@@ -279,8 +355,7 @@
" bp[(noise < noise_mn-thresholds_noise_sigma*noise_std) |\n",
" (noise > noise_mn+thresholds_noise_sigma*noise_std)] |= BadPixels.NOISE_OUT_OF_THRESHOLD.value\n",
" bp[(noise < thresholds_noise_hard[0]) | (noise > thresholds_noise_hard[1])] |= BadPixels.NOISE_OUT_OF_THRESHOLD.value\n",
" bp[~np.isfinite(noise)] |= BadPixels.OFFSET_NOISE_EVAL_ERROR.value\n",
"\n",
" bp[~np.isfinite(noise)] |= BadPixels.OFFSET_NOISE_EVAL_ERROR.value \n",
"\n",
" return offset, noise, bp, cells, pulseid_checksum\n",
" \n",
@@ -295,23 +370,24 @@
"all_cells = []\n",
"checksums = {}\n",
"\n",
"fullConfigs, operatingFreqs = getDSSCctrlData(in_folder)\n",
"\n",
"for gain, mapped_files in gain_mapped_files.items():\n",
" \n",
" inp = []\n",
" dones = []\n",
" for i in modules:\n",
" qm = \"Q{}M{}\".format(i//4 +1, i % 4 + 1) \n",
" if qm in mapped_files and not mapped_files[qm].empty():\n",
" fname_in = mapped_files[qm].get() \n",
" fname_in = mapped_files[qm].get()\n",
" print(\"Process file: \", fname_in)\n",
" dones.append(mapped_files[qm].empty())\n",
" else:\n",
" continue\n",
" fout = os.path.abspath(\"{}/{}\".format(out_folder, (os.path.split(fname_in)[-1]).replace(\"RAW\", \"CORR\")))\n",
" inp.append((fname_in, fout, i))\n",
" first = False\n",
" inp.append((fname_in, i))\n",
"\n",
" p = partial(characterize_module, max_cells,\n",
" (thresholds_offset_hard, thresholds_offset_sigma,\n",
" thresholds_noise_hard, thresholds_noise_sigma), rawversion, loc)\n",
" thresholds_noise_hard, thresholds_noise_sigma), rawversion, karabo_id, h5path, h5path_idx)\n",
" results = list(map(p, inp))\n",
" \n",
" for ii, r in enumerate(results):\n",
@@ -336,8 +412,53 @@
" total_sequences=total_sequences,\n",
" filesize=total_file_size)\n",
"logger.send()\n",
"max_cells = np.max(all_cells)\n",
"print(\"Using {} memory cells\".format(max_cells))"
"if len(all_cells) > 0:\n",
" max_cells = np.max(all_cells)\n",
" print(f\"Using {max_cells} memory cells\")\n",
"else:\n",
" raise ValueError(\"0 processed memory cells. No raw data available.\")\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"# Retrieve existing constants for comparison\n",
"clist = [\"Offset\", \"Noise\"]\n",
"old_const = {}\n",
"old_mdata = {}\n",
"\n",
"print('Retrieve pre-existing constants for comparison.')\n",
"detinst = getattr(Detectors, dinstance)\n",
"for qm in offset_g.keys():\n",
" device = getattr(detinst, qm)\n",
" for const in clist:\n",
" condition = Conditions.Dark.DSSC(memory_cells=max_cells,\n",
" bias_voltage=bias_voltage,\n",
" pulseid_checksum=checksums[qm],\n",
" acquisition_rate=operatingFreqs[qm], \n",
" configid_checksum=fullConfigs[qm])\n",
"\n",
" data, mdata = get_from_db(device,\n",
" getattr(Constants.DSSC, const)(),\n",
" condition,\n",
" None,\n",
" cal_db_interface, creation_time=creation_time,\n",
" verbosity=2, timeout=cal_db_timeout)\n",
"\n",
" old_const[const] = data\n",
"\n",
" if mdata is not None and data is not None:\n",
" time = mdata.calibration_constant_version.begin_at\n",
" old_mdata[const] = time.isoformat()\n",
" os.makedirs(f'{out_folder}/old/', exist_ok=True)\n",
" save_const_to_h5(mdata, f'{out_folder}/old/')\n",
" else:\n",
" old_mdata[const] = \"Not found\""
]
},
{
@@ -347,26 +468,22 @@
"ExecuteTime": {
"end_time": "2018-12-06T09:38:18.234582Z",
"start_time": "2018-12-06T09:38:18.222838Z"
}
},
"collapsed": false
},
"outputs": [],
"source": [
"res = OrderedDict()\n",
"for i in modules:\n",
" qm = \"Q{}M{}\".format(i//4+1, i%4+1)\n",
" res[qm] = {'Offset': offset_g[qm],\n",
" 'Noise': noise_g[qm],\n",
" 'BadPixels': badpix_g[qm] \n",
" }\n",
" \n",
"if local_output:\n",
" for qm in offset_g.keys():\n",
" ofile = \"{}/dssc_offset_store_{}_{}.h5\".format(out_folder, \"_\".join(offset_runs.values()), qm)\n",
" store_file = h5py.File(ofile, \"w\")\n",
" store_file[\"{}/Offset/0/data\".format(qm)] = offset_g[qm]\n",
" store_file[\"{}/Noise/0/data\".format(qm)] = noise_g[qm]\n",
" store_file[\"{}/BadPixels/0/data\".format(qm)] = badpix_g[qm]\n",
" store_file.close()"
" qm = f\"Q{i//4+1}M{i%4+1}\"\n",
" try:\n",
" res[qm] = {'Offset': offset_g[qm],\n",
" 'Noise': noise_g[qm],\n",
" #TODO: No badpixelsdark, yet.\n",
" #'BadPixelsDark': badpix_g[qm] \n",
" }\n",
" except Exception as e:\n",
" print(f\"Error: No constants for {qm}: {e}\")"
]
},
{
@@ -376,91 +493,92 @@
"ExecuteTime": {
"end_time": "2018-12-06T09:49:32.449330Z",
"start_time": "2018-12-06T09:49:20.231607Z"
}
},
"collapsed": false
},
"outputs": [],
"source": [
"if db_output:\n",
" for dont_use_pulseIds in [True, False]:\n",
" for qm in offset_g.keys():\n",
" try:\n",
" metadata = ConstantMetaData()\n",
" offset = Constants.DSSC.Offset()\n",
" offset.data = offset_g[qm]\n",
" metadata.calibration_constant = offset\n",
" pidsum = None if dont_use_pulseIds else checksums[qm]\n",
" # set the operating condition\n",
" condition = Conditions.Dark.DSSC(memory_cells=max_cells, bias_voltage=bias_voltage,\n",
" pulseid_checksum=pidsum)\n",
" detinst = getattr(Detectors, dinstance)\n",
"\n",
" device = getattr(detinst, qm)\n",
"\n",
" metadata.detector_condition = condition\n",
"\n",
" # specify the a version for this constant\n",
" if creation_time is None:\n",
" metadata.calibration_constant_version = Versions.Now(device=device)\n",
" else:\n",
" metadata.calibration_constant_version = Versions.Timespan(device=device, start=creation_time)\n",
" metadata.send(cal_db_interface, timeout=3000000)\n",
"\n",
"\n",
" metadata = ConstantMetaData()\n",
" noise = Constants.DSSC.Noise()\n",
" noise.data = noise_g[qm]\n",
" metadata.calibration_constant = noise\n",
"\n",
" # set the operating condition\n",
" condition = Conditions.Dark.DSSC(memory_cells=max_cells, bias_voltage=bias_voltage,\n",
" pulseid_checksum=pidsum)\n",
" metadata.detector_condition = condition\n",
"\n",
" # specify the a version for this constant\n",
" if creation_time is None:\n",
" metadata.calibration_constant_version = Versions.Now(device=device)\n",
" else:\n",
" metadata.calibration_constant_version = Versions.Timespan(device=device, start=creation_time)\n",
" metadata.send(cal_db_interface, timeout=3000000)\n",
"\n",
" continue # no bad pixels yet\n",
" metadata = ConstantMetaData()\n",
" badpixels = Constants.DSSC.BadPixelsDark()\n",
" badpixels.data = badpix_g[qm]\n",
" metadata.calibration_constant = badpixels\n",
"\n",
" # set the operating condition\n",
" condition = Conditions.Dark.DSSC(memory_cells=max_cells, bias_voltage=bias_voltage,\n",
" pulseid_checksum=pidsum)\n",
" metadata.detector_condition = condition\n",
"\n",
" # specify the a version for this constant\n",
" if creation_time is None:\n",
" metadata.calibration_constant_version = Versions.Now(device=device)\n",
" else:\n",
" metadata.calibration_constant_version = Versions.Timespan(device=device, start=creation_time)\n",
" metadata.send(cal_db_interface, timeout=3000000)\n",
" except Exception as e:\n",
" print(e)"
"# Push the same constant two different times.\n",
"# One with the generated pulseID check sum setting for the offline calibration.\n",
"# And another for the online calibration as it doesn't have this pulseID checksum, yet.\n",
"for dont_use_pulseIds in [True, False]:\n",
" for qm in res.keys():\n",
" detinst = getattr(Detectors, dinstance)\n",
" device = getattr(detinst, qm)\n",
"\n",
" for const in res[qm].keys():\n",
"\n",
" metadata = ConstantMetaData()\n",
" dconst = getattr(Constants.DSSC, const)()\n",
" dconst.data = res[qm][const]\n",
" metadata.calibration_constant = dconst\n",
"\n",
" pidsum = None if dont_use_pulseIds else checksums[qm]\n",
" opfreq = None if dont_use_pulseIds else operatingFreqs[qm]\n",
" configset = None if dont_use_pulseIds else fullConfigs[qm]\n",
" \n",
" \n",
" # set the operating condition\n",
" condition = Conditions.Dark.DSSC(memory_cells=max_cells,\n",
" bias_voltage=bias_voltage,\n",
" pulseid_checksum=pidsum,\n",
" acquisition_rate=opfreq,\n",
" configid_checksum=configset)\n",
" \n",
" metadata.detector_condition = condition\n",
"\n",
" # specify the a version for this constant\n",
" if creation_time is None:\n",
" metadata.calibration_constant_version = Versions.Now(device=device)\n",
" else:\n",
" metadata.calibration_constant_version = Versions.Timespan(device=device, start=creation_time)\n",
"\n",
" if db_output:\n",
" try:\n",
" metadata.send(cal_db_interface, timeout=cal_db_timeout)\n",
" except Exception as e:\n",
" print(\"Error\", e)\n",
"\n",
" if local_output:\n",
" # Don't save constant localy two times.\n",
" if dont_use_pulseIds:\n",
" save_const_to_h5(metadata, out_folder)\n",
" print(f\"Calibration constant {const} is stored locally.\\n\")\n",
" \n",
" if not dont_use_pulseIds:\n",
" print(\"Generated constants with conditions:\\n\")\n",
" print(f\"• memory_cells: {max_cells}\\n• bias_voltage: {bias_voltage}\\n\"\n",
" f\"• pulseid_checksum: {pidsum}\\n• creation_time: {creation_time}\\n\")\n"
]
},
{
"cell_type": "markdown",
"metadata": {},
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"## Single-Cell Overviews ##\n",
"\n",
"Single cell overviews allow to identify potential effects on all memory cells, e.g. on sensor level. Additionally, they should serve as a first sanity check on expected behaviour, e.g. if structuring on the ASIC level is visible in the offsets, but otherwise no immediate artifacts are visible."
"mnames = []\n",
"for i in modules:\n",
" qm = f\"Q{i//4+1}M{i % 4+1}\"\n",
" display(Markdown(f'## Position of the module {mnames} and it\\'s ASICs##'))\n",
" mnames.append(qm)\n",
" \n",
"show_processed_modules(dinstance=dinstance, constants=None, mnames=mnames, mode=\"position\")\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"for r in res.values():\n",
" del r[\"BadPixels\"]"
"## Single-Cell Overviews ##\n",
"\n",
"Single cell overviews allow to identify potential effects on all memory cells, e.g. on sensor level. Additionally, they should serve as a first sanity check on expected behaviour, e.g. if structuring on the ASIC level is visible in the offsets, but otherwise no immediate artifacts are visible."
]
},
{
@@ -471,6 +589,7 @@
"end_time": "2018-12-06T09:49:14.540552Z",
"start_time": "2018-12-06T09:49:13.009674Z"
},
"collapsed": false,
"scrolled": false
},
"outputs": [],
@@ -478,22 +597,14 @@
"cell = 9\n",
"gain = 0\n",
"out_folder = None\n",
"show_overview(res, cell, gain, out_folder=out_folder, infix=\"_\".join(offset_runs.values()))"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Global Bad Pixel Behaviour ##\n",
"\n",
"The following plots show the results of bad pixel evaluation for all evaluated memory cells. Cells are stacked in the Z-dimension, while pixels values in x/y are rebinned with a factor of 2. This excludes single bad pixels present only in disconnected pixels. Hence, any bad pixels spanning at least 4 pixels in the x/y-plane, or across at least two memory cells are indicated. Colors encode the bad pixel type, or mixed type."
"show_overview(res, cell, gain, out_folder=out_folder, infix=\"_{}\".format(run))"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": true,
"scrolled": false
},
"outputs": [],
@@ -503,11 +614,24 @@
" BadPixels.OFFSET_OUT_OF_THRESHOLD.value: (BadPixels.OFFSET_OUT_OF_THRESHOLD.name, '#00FF0080'),\n",
" BadPixels.OFFSET_OUT_OF_THRESHOLD.value | BadPixels.NOISE_OUT_OF_THRESHOLD.value: ('MIXED', '#DD00DD80')}\n",
"\n",
"rebin = 8 if not high_res_badpix_3d else 2\n",
"\n",
"gain = 0\n",
"for mod, data in badpix_g.items():\n",
" plot_badpix_3d(data, cols, title=mod, rebin_fac=rebin)"
"if high_res_badpix_3d:\n",
" display(Markdown(\"\"\"\n",
" \n",
" ## Global Bad Pixel Behaviour ##\n",
"\n",
" The following plots show the results of bad pixel evaluation for all evaluated memory cells. \n",
" Cells are stacked in the Z-dimension, while pixels values in x/y are rebinned with a factor of 2. \n",
" This excludes single bad pixels present only in disconnected pixels. \n",
" Hence, any bad pixels spanning at least 4 pixels in the x/y-plane, or across at least two memory cells are indicated. \n",
" Colors encode the bad pixel type, or mixed type.\n",
"\n",
" \"\"\"))\n",
" # set rebin_fac to 1 for avoiding rebining and\n",
" # losing real values of badpixels(High resolution).\n",
" gain = 0\n",
" for mod, data in badpix_g.items():\n",
" plot_badpix_3d(data, cols, title=mod, rebin_fac=2)\n",
" plt.show()"
]
},
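Rebinning for these 3D plots amounts to pooling the bad-pixel mask over n x n blocks, which dilutes isolated single bad pixels while clusters survive; a sketch of one way to do this (the actual implementation inside plot_badpix_3d may differ):

import numpy as np

def rebin_fraction(mask, fac):
    # Average a 2D boolean mask over fac x fac blocks: an isolated bad pixel
    # drops to 1/fac**2, while a filled cluster stays at 1.0.
    h, w = mask.shape
    return mask[:h - h % fac, :w - w % fac].reshape(
        h // fac, fac, w // fac, fac).mean(axis=(1, 3))

mask = np.zeros((8, 8), bool)
mask[0, 0] = True        # single disconnected bad pixel
mask[4:6, 4:6] = True    # 2x2 cluster
print(rebin_fraction(mask, 2))  # 0.25 for the single pixel, 1.0 for the cluster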
{
@@ -523,50 +647,103 @@
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false,
"scrolled": false
},
"outputs": [],
"source": [
"create_constant_overview(offset_g, \"Offset (ADU)\", max_cells,\n",
" out_folder=out_folder, infix=\"_\".join(offset_runs.values()), entries=1)"
"create_constant_overview(offset_g, \"Offset (ADU)\", max_cells, entries=1)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false,
"scrolled": false
},
"outputs": [],
"source": [
"create_constant_overview(noise_g, \"Noise (ADU)\", max_cells, 0, 100,\n",
" out_folder=out_folder, infix=\"_\".join(offset_runs.values()), entries=1)"
"create_constant_overview(noise_g, \"Noise (ADU)\", max_cells, 0, 100, entries=1)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"bad_pixel_aggregate_g = OrderedDict()\n",
"for m, d in badpix_g.items():\n",
" bad_pixel_aggregate_g[m] = d.astype(np.bool).astype(np.float)\n",
"create_constant_overview(bad_pixel_aggregate_g, \"Bad pixel fraction\", max_cells, entries=1,\n",
" out_folder=out_folder, infix=\"_\".join(offset_runs.values()))"
"create_constant_overview(bad_pixel_aggregate_g, \"Bad pixel fraction\", max_cells, entries=1)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Summary tables ##\n",
"\n",
"The following tables show summary information for the evaluated module. Values for currently evaluated constants are compared with values for pre-existing constants retrieved from the calibration database."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"header = ['Parameter', \n",
" \"New constant\", \"Old constant \", \n",
" \"New constant\", \"Old constant \", \n",
" \"New constant\", \"Old constant \"]\n",
"\n",
"for const in ['Offset', 'Noise']:\n",
" table = [['','High gain', 'High gain']]\n",
" for qm in res.keys():\n",
"\n",
" data = np.copy(res[qm][const])\n",
"\n",
" if old_const[const] is not None:\n",
" dataold = np.copy(old_const[const])\n",
"\n",
" f_list = [np.nanmedian, np.nanmean, np.nanstd, np.nanmin, np.nanmax]\n",
" n_list = ['Median', 'Mean', 'Std', 'Min', 'Max']\n",
"\n",
" for i, f in enumerate(f_list):\n",
" line = [n_list[i]]\n",
" line.append('{:6.1f}'.format(f(data[...,gain])))\n",
" if old_const[const] is not None:\n",
" line.append('{:6.1f}'.format(f(dataold[...,gain])))\n",
" else:\n",
" line.append('-')\n",
"\n",
" table.append(line)\n",
"\n",
" display(Markdown('### {} [ADU], good and bad pixels ###'.format(const)))\n",
" md = display(Latex(tabulate.tabulate(table, tablefmt='latex', headers=header))) "
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"metadata": {
"collapsed": true
},
"outputs": [],
"source": []
}
@@ -587,7 +764,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.6.7"
"version": "3.6.6"
}
},
"nbformat": 4,