calibration / pycalibration · Commit 91e05cb3

Support multiple module detector in Characterize_DynamicFF_NBC.ipynb

Authored 1 year ago by Egor Sobolev
Parent: 035e0ffa
No related branches, tags, or merge requests contain this commit.
Changes: 1 changed file with 132 additions and 171 deletions

notebooks/DynamicFF/Characterize_DynamicFF_NBC.ipynb (+132 −171)
@@ -4,7 +4,7 @@
 "cell_type": "markdown",
 "metadata": {},
 "source": [
-"# Shimadzu HPVX2 Characterization of dark and flat field\n",
+"# Characterization of dark and flat field for Dynamic Flat Field correction\n",
 "\n",
 "Author: Egor Sobolev\n",
 "\n",
@@ -17,21 +17,24 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"in_folder = \"/gpfs/exfel/exp/SPB/202121/p002919/raw/\" # input folder, required\n",
+"in_folder = \"/gpfs/exfel/exp/SPB/202430/p900425/raw\" # input folder, required\n",
 "out_folder = '/gpfs/exfel/data/scratch/esobolev/test/shimadzu' # output folder, required\n",
 "metadata_folder = \"\" # Directory containing calibration_metadata.yml when run by xfel-calibrate\n",
-"dark_run = 59 # which run to read data from, required\n",
+"dark_run = 1 # which run to read data from, required\n",
-"flat_run = 40 # which run to read\n",
+"flat_run = 2 # which run to read\n",
 "\n",
 "# Data files parameters.\n",
-"karabo_da = ['HPVX01'] # data aggregators\n",
+"karabo_da = ['HPVX01/1', 'HPVX01/2'] # data aggregators\n",
-"karabo_id = \"SPB_EHD_HPVX2_2\" # karabo prefix of Shimadzu HPV-X2 devices\n",
+"karabo_id = \"SPB_EHD_MIC\" # karabo prefix of Shimadzu HPV-X2 devices\n",
 "\n",
 "#receiver_id = \"PNCCD_FMT-0\" # inset for receiver devices\n",
 "#path_template = 'RAW-R{:04d}-{}-S{{:05d}}.h5' # the template to use to access data\n",
-"instrument_source_template = '{}/CAM/CAMERA:daqOutput' # data source path in h5file. Template filled with karabo_id and receiver_id\n",
+"instrument_source_template = 'SPB_EHD_MIC/CAM/HPVX2_{module}:daqOutput' # data source path in h5file.\n",
+"#instrument_source_template = 'SPB_EHD_HPVX2_{module}/CAM/CAMERA:daqOutput'\n",
 "image_key = \"data.image.pixels\" # image data key in Karabo or exdf notation\n",
 "\n",
+"db_module_template = \"Shimadzu_HPVX2_{}\"\n",
+"\n",
 "# Database access parameters.\n",
 "use_dir_creation_date = True # use dir creation date as data production reference date\n",
 "cal_db_interface = \"tcp://max-exfl-cal001:8021\" # calibration DB interface to use\n",
@@ -40,7 +43,7 @@
 "local_output = True # if True, the notebook saves dark constants locally\n",
 "creation_time = \"\" # To overwrite the measured creation_time. Required Format: YYYY-MM-DD HR:MN:SC.00 e.g. 2019-07-04 11:02:41.00\n",
 "\n",
-"n_components = 50"
+"n_components = 50 # Number of principal components to compute"
 ]
 },
 {
@@ -58,6 +61,7 @@
 "import time\n",
 "import numpy as np\n",
 "import matplotlib.pyplot as plt\n",
+"from IPython.display import display, Markdown\n",
 "\n",
 "from extra_data import RunDirectory\n",
 "\n",
@@ -99,14 +103,29 @@
 "report = get_report(metadata_folder)\n",
 "cal_db_interface = get_random_db_interface(cal_db_interface)\n",
 "print(f'Calibration database interface: {cal_db_interface}')\n",
+"print()\n",
 "\n",
-"instrument = karabo_id.split(\"_\")[0]\n",
+"instrument, part, component = karabo_id.split('_')\n",
 "\n",
-"source = instrument_source_template.format(karabo_id)\n",
+"sources = {}\n",
+"source_to_db = {}\n",
+"print(\"Sources:\")\n",
+"for da in karabo_da:\n",
+"    aggr, _, module = da.partition('/')\n",
+"    source_name = instrument_source_template.format(\n",
+"        instrument=instrument, part=part, component=component,\n",
+"        module=module\n",
+"    )\n",
+"    sources[source_name] = aggr\n",
+"    source_to_db[source_name] = db_module_template.format(module)\n",
+"    print('-', source_name)\n",
+"print()\n",
 "\n",
 "print(f\"Detector in use is {karabo_id}\")\n",
 "print(f\"Instrument {instrument}\")\n",
 "\n",
-"step_timer = StepTimer()"
+"step_timer = StepTimer()\n",
+"constants = {}"
 ]
 },
 {
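With the default parameters, the loop above should resolve to two mappings from instrument source to data aggregator and database module; the later per-module loops iterate over these. A sketch of the expected result (not captured notebook output):

```python
# Expected mappings for karabo_da = ['HPVX01/1', 'HPVX01/2'] (sketch, not real output).
sources = {
    'SPB_EHD_MIC/CAM/HPVX2_1:daqOutput': 'HPVX01',
    'SPB_EHD_MIC/CAM/HPVX2_2:daqOutput': 'HPVX01',
}
source_to_db = {
    'SPB_EHD_MIC/CAM/HPVX2_1:daqOutput': 'Shimadzu_HPVX2_1',
    'SPB_EHD_MIC/CAM/HPVX2_2:daqOutput': 'Shimadzu_HPVX2_2',
}
```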
@@ -122,41 +141,36 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"step_timer.start()\n",
-"\n",
-"dark_dc = RunDirectory(f\"{in_folder}/r{dark_run:04d}\")\n",
-"dark_dc = dark_dc.select([(source, image_key)])\n",
-"key_data = dark_dc[source][image_key]\n",
-"\n",
-"images_dark = key_data.ndarray()\n",
-"ntrain, npulse, ny, nx = images_dark.shape\n",
-"\n",
-"print(f\"N image: {ntrain * npulse} (ntrain: {ntrain}, npulse: {npulse})\")\n",
-"print(f\"Image size: {ny} x {nx} px\")\n",
-"step_timer.done_step(\"Read dark images\")"
-]
-},
-{
-"cell_type": "code",
-"execution_count": null,
-"metadata": {},
-"outputs": [],
-"source": [
-"step_timer.start()\n",
-"dark = dffc.process_dark(images_dark)\n",
-"step_timer.done_step(\"Process dark images\")"
-]
-},
-{
-"cell_type": "code",
-"execution_count": null,
-"metadata": {},
-"outputs": [],
-"source": [
-"step_timer.start()\n",
-"plot_camera_image(dark)\n",
-"plt.show()\n",
-"step_timer.done_step(\"Draw offset map\")"
+"for source, aggr in sources.items():\n",
+"    display(Markdown(f\"## {source}\"))\n",
+"\n",
+"    # read\n",
+"    step_timer.start()\n",
+"    dark_dc = RunDirectory(f\"{in_folder}/r{dark_run:04d}\",\n",
+"                           include=f\"RAW-R{dark_run:04d}-{aggr}-S*.h5\")\n",
+"    dark_dc = dark_dc.select([(source, image_key)])\n",
+"    key_data = dark_dc[source][image_key]\n",
+"\n",
+"    images_dark = key_data.ndarray()\n",
+"    ntrain, npulse, ny, nx = images_dark.shape\n",
+"\n",
+"    print(f\"N image: {ntrain * npulse} (ntrain: {ntrain}, npulse: {npulse})\")\n",
+"    print(f\"Image size: {ny} x {nx} px\")\n",
+"    step_timer.done_step(\"Read dark images\")\n",
+"\n",
+"    # process\n",
+"    step_timer.start()\n",
+"    dark = dffc.process_dark(images_dark)\n",
+"    module_constants = constants.setdefault(source, {})\n",
+"    module_constants[\"Offset\"] = dark\n",
+"    step_timer.done_step(\"Process dark images\")\n",
+"    display()\n",
+"\n",
+"    # draw plots\n",
+"    step_timer.start()\n",
+"    plot_camera_image(dark)\n",
+"    plt.show()\n",
+"    step_timer.done_step(\"Draw offsets\")"
 ]
 },
 {
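The per-module read uses the `include` glob of `extra_data.RunDirectory` to open only the files written by the matching aggregator before selecting the module's source. A minimal standalone sketch of that read step, assuming the notebook's default paths and source names:

```python
# Minimal sketch of the per-module dark read (paths and names are the defaults above).
from extra_data import RunDirectory

in_folder, dark_run = "/gpfs/exfel/exp/SPB/202430/p900425/raw", 1
source = 'SPB_EHD_MIC/CAM/HPVX2_1:daqOutput'
aggr = 'HPVX01'
image_key = "data.image.pixels"

# Open only this aggregator's files, then keep just the image key of this module.
dc = RunDirectory(f"{in_folder}/r{dark_run:04d}",
                  include=f"RAW-R{dark_run:04d}-{aggr}-S*.h5")
dc = dc.select([(source, image_key)])
images_dark = dc[source][image_key].ndarray()   # shape: (ntrain, npulse, ny, nx)
```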
@@ -169,93 +183,56 @@
 {
 "cell_type": "code",
 "execution_count": null,
-"metadata": {},
+"metadata": {
+"scrolled": false
+},
 "outputs": [],
 "source": [
-"step_timer.start()\n",
-"\n",
-"flat_dc = RunDirectory(f\"{in_folder}/r{flat_run:04d}\")\n",
-"flat_dc = flat_dc.select([(source, image_key)])\n",
-"key_data = flat_dc[source][image_key]\n",
-"\n",
-"images_flat = key_data.ndarray()\n",
-"ntrain, npulse, ny, nx = images_flat.shape\n",
-"\n",
-"print(f\"N image: {ntrain * npulse} (ntrain: {ntrain}, npulse: {npulse})\")\n",
-"print(f\"Image size: {ny} x {nx} px\")\n",
-"step_timer.done_step(\"Read flat-field images\")\n",
-"\n",
-"tm0 = time.monotonic()\n",
-"tm_cm = time.monotonic() - tm0\n"
-]
-},
-{
-"cell_type": "code",
-"execution_count": null,
-"metadata": {},
-"outputs": [],
-"source": [
-"step_timer.start()\n",
-"flat, components, explained_variance_ratio = dffc.process_flat(\n",
-"    images_flat, dark, n_components)\n",
-"step_timer.done_step(\"Process flat-field images\")"
-]
-},
-{
-"cell_type": "markdown",
-"metadata": {},
-"source": [
-"## Average flat-field"
-]
-},
-{
-"cell_type": "code",
-"execution_count": null,
-"metadata": {},
-"outputs": [],
-"source": [
-"step_timer.start()\n",
-"plot_camera_image(flat)\n",
-"plt.show()"
-]
-},
-{
-"cell_type": "markdown",
-"metadata": {},
-"source": [
-"## Explained variance ratio"
-]
-},
-{
-"cell_type": "code",
-"execution_count": null,
-"metadata": {},
-"outputs": [],
-"source": [
-"fig, ax = plt.subplots(1, 1, figsize=(10,4), tight_layout=True)\n",
-"ax.semilogy(explained_variance_ratio, 'o')\n",
-"ax.set_xticks(np.arange(len(explained_variance_ratio)))\n",
-"ax.set_xlabel(\"Component no.\")\n",
-"ax.set_ylabel(\"Variance fraction\")\n",
-"plt.show()"
-]
-},
-{
-"cell_type": "markdown",
-"metadata": {},
-"source": [
-"# The first principal components (up to 20)"
-]
-},
-{
-"cell_type": "code",
-"execution_count": null,
-"metadata": {},
-"outputs": [],
-"source": [
-"plot_images(components[:20], figsize=(13, 8))\n",
-"plt.show()\n",
-"step_timer.done_step(\"Draw flat-field map and components\")"
+"for source, aggr in sources.items():\n",
+"    display(Markdown(f\"## {source}\"))\n",
+"\n",
+"    # read\n",
+"    step_timer.start()\n",
+"    flat_dc = RunDirectory(f\"{in_folder}/r{flat_run:04d}\",\n",
+"                           include=f\"RAW-R{flat_run:04d}-{aggr}-S*.h5\")\n",
+"    flat_dc = flat_dc.select([(source, image_key)])\n",
+"    key_data = flat_dc[source][image_key]\n",
+"\n",
+"    images_flat = key_data.ndarray()\n",
+"    ntrain, npulse, ny, nx = images_flat.shape\n",
+"\n",
+"    print(f\"N image: {ntrain * npulse} (ntrain: {ntrain}, npulse: {npulse})\")\n",
+"    print(f\"Image size: {ny} x {nx} px\")\n",
+"    step_timer.done_step(\"Read flat-field images\")\n",
+"\n",
+"    # process\n",
+"    step_timer.start()\n",
+"    flat, components, explained_variance_ratio = dffc.process_flat(\n",
+"        images_flat, dark, n_components)\n",
+"\n",
+"    module_constants = constants.setdefault(source, {})\n",
+"    module_constants[\"DynamicFF\"] = np.concatenate([flat[None, ...], components])\n",
+"    step_timer.done_step(\"Process flat-field images\")\n",
+"\n",
+"    # draw plots\n",
+"    step_timer.start()\n",
+"    display(Markdown(\"### Average flat-field\"))\n",
+"    plot_camera_image(flat)\n",
+"    plt.show()\n",
+"\n",
+"    display(Markdown(\"### Explained variance ratio\"))\n",
+"    fig, ax = plt.subplots(1, 1, figsize=(10,4), tight_layout=True)\n",
+"    ax.semilogy(explained_variance_ratio, 'o')\n",
+"    ax.set_xticks(np.arange(len(explained_variance_ratio)))\n",
+"    ax.set_xlabel(\"Component no.\")\n",
+"    ax.set_ylabel(\"Variance fraction\")\n",
+"    plt.show()\n",
+"\n",
+"    display(Markdown(\"### The first principal components (up to 20)\"))\n",
+"    plot_images(components[:20], figsize=(13, 8))\n",
+"    plt.show()\n",
+"\n",
+"    step_timer.done_step(\"Draw flat-field\")"
 ]
 },
 {
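The new "DynamicFF" constant stacks the average flat field and the principal components into a single array: the first slice is the mean flat field and the remaining `n_components` slices are the PCA components returned by `dffc.process_flat`. A sketch of the resulting layout with illustrative shapes:

```python
# Layout of the "DynamicFF" constant (shapes are illustrative, not from real data).
import numpy as np

ny, nx, n_components = 256, 512, 50            # illustrative sizes
flat = np.zeros((ny, nx))                      # average flat field
components = np.zeros((n_components, ny, nx))  # PCA components

dynamic_ff = np.concatenate([flat[None, ...], components])
assert dynamic_ff.shape == (n_components + 1, ny, nx)
# dynamic_ff[0]  -> average flat field
# dynamic_ff[1:] -> principal components
```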
@@ -276,45 +253,29 @@
 "# Output Folder Creation:\n",
 "os.makedirs(out_folder, exist_ok=True)\n",
 "\n",
-"db_module = \"SHIMADZU_HPVX2_M001\"\n",
-"\n",
-"constant_name = \"Offset\"\n",
-"\n",
-"conditions = {\n",
-"    'Memory cells': {'value': 128},\n",
-"    'Pixels X': {'value': flat.shape[1]},\n",
-"    'Pixels Y': {'value': flat.shape[0]},\n",
-"    'FF components': {'value': components.shape[0]}\n",
-"}\n",
-"\n",
-"data_to_store = {\n",
-"    'condition': conditions,\n",
-"    'db_module': db_module,\n",
-"    'karabo_id': karabo_id,\n",
-"    'constant': constant_name,\n",
-"    'data': dark,\n",
-"    'creation_time': creation_time.replace(microsecond=0),\n",
-"    'file_loc': file_loc,\n",
-"    'report': report,\n",
-"}\n",
-"\n",
-"ofile = f\"{out_folder}/const_{constant_name}_{db_module}.h5\"\n",
-"if os.path.isfile(ofile):\n",
-"    print(f'File {ofile} already exists and will be overwritten')\n",
-"save_dict_to_hdf5(data_to_store, ofile)\n",
-"\n",
-"\n",
-"constant_name = \"ComponentsFF\"\n",
-"\n",
-"data_to_store.update({\n",
-"    'constant': constant_name,\n",
-"    'data': np.concatenate([flat[None, ...], components]),\n",
-"})\n",
-"\n",
-"ofile = f\"{out_folder}/const_{constant_name}_{db_module}.h5\"\n",
-"if os.path.isfile(ofile):\n",
-"    print(f'File {ofile} already exists and will be overwritten')\n",
-"save_dict_to_hdf5(data_to_store, ofile)\n",
-"\n",
+"for source, module_constants in constants.items():\n",
+"    for constant_name, data in module_constants.items():\n",
+"        db_module = source_to_db[source]\n",
+"\n",
+"        conditions = {\n",
+"            'Frame Size': {'value': 1.0},\n",
+"        }\n",
+"\n",
+"        data_to_store = {\n",
+"            'condition': conditions,\n",
+"            'db_module': db_module,\n",
+"            'karabo_id': karabo_id,\n",
+"            'constant': constant_name,\n",
+"            'data': data,\n",
+"            'creation_time': creation_time.replace(microsecond=0),\n",
+"            'file_loc': file_loc,\n",
+"            'report': report,\n",
+"        }\n",
+"\n",
+"        ofile = f\"{out_folder}/const_{constant_name}_{db_module}.h5\"\n",
+"        if os.path.isfile(ofile):\n",
+"            print(f'File {ofile} already exists and will be overwritten')\n",
+"        save_dict_to_hdf5(data_to_store, ofile)\n",
+"\n",
 "step_timer.done_step(\"Storing calibration constants\")"
 ]
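With local output enabled, the nested loop writes one HDF5 file per module and per constant. For the default two-module configuration this would produce file names along the following lines (hypothetical, built from the templates above):

```python
# Hypothetical output file names for the default two-module configuration.
out_folder = '/gpfs/exfel/data/scratch/esobolev/test/shimadzu'
for db_module in ('Shimadzu_HPVX2_1', 'Shimadzu_HPVX2_2'):
    for constant_name in ('Offset', 'DynamicFF'):
        print(f"{out_folder}/const_{constant_name}_{db_module}.h5")
# .../const_Offset_Shimadzu_HPVX2_1.h5
# .../const_DynamicFF_Shimadzu_HPVX2_1.h5
# .../const_Offset_Shimadzu_HPVX2_2.h5
# .../const_DynamicFF_Shimadzu_HPVX2_2.h5
```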