pycalibration: Commit 2d3f03bf

Authored 11 months ago by Egor Sobolev, committed 11 months ago by Philipp Schmidt
Support multiple module detector in Characterize_DynamicFF_NBC.ipynb
Parent: 5ca049e8
1 merge request: !939 [Generic][Shimadzu] Dynamic flat-field characterization and correction for MHz microscopy
Changes: 1 changed file

notebooks/DynamicFF/Characterize_DynamicFF_NBC.ipynb (+132, −171)
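The core of the change is that each `karabo_da` entry now carries a module suffix ('HPVX01/1', 'HPVX01/2') which is expanded into a per-module instrument source and calibration-DB module name. A minimal sketch of that mapping, built only from parameter values visible in the diff below (illustrative, not code from the commit itself):

``` python
# Sketch of the per-module naming convention introduced by this commit.
# Values are taken from the notebook parameters shown in the diff below.
karabo_da = ['HPVX01/1', 'HPVX01/2']  # data aggregator / module number
instrument_source_template = 'SPB_EHD_MIC/CAM/HPVX2_{module}:daqOutput'
db_module_template = "Shimadzu_HPVX2_{}"

for da in karabo_da:
    aggr, _, module = da.partition('/')  # e.g. ('HPVX01', '/', '2')
    source = instrument_source_template.format(module=module)
    db_module = db_module_template.format(module)
    print(f"{da}: source={source}, db_module={db_module}")
# HPVX01/1: source=SPB_EHD_MIC/CAM/HPVX2_1:daqOutput, db_module=Shimadzu_HPVX2_1
# HPVX01/2: source=SPB_EHD_MIC/CAM/HPVX2_2:daqOutput, db_module=Shimadzu_HPVX2_2
```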
...
...
@@ -4,7 +4,7 @@
"cell_type": "markdown",
"metadata": {},
"source": [
"#
Shimadzu HPVX2
Characterization of dark and flat field\n",
"# Characterization of dark and flat field
for Dynamic Flat Field correction
\n",
"\n",
"Author: Egor Sobolev\n",
"\n",
...
...
@@ -17,21 +17,24 @@
"metadata": {},
"outputs": [],
"source": [
"in_folder = \"/gpfs/exfel/exp/SPB/202
121
/p00
2919
/raw
/
\" # input folder, required\n",
"in_folder = \"/gpfs/exfel/exp/SPB/202
430
/p
9
00
425
/raw\" # input folder, required\n",
"out_folder = '/gpfs/exfel/data/scratch/esobolev/test/shimadzu' # output folder, required\n",
"metadata_folder = \"\" # Directory containing calibration_metadata.yml when run by xfel-calibrate\n",
"dark_run =
59
# which run to read data from, required\n",
"flat_run =
40
# which run to read\n",
"dark_run =
1
# which run to read data from, required\n",
"flat_run =
2
# which run to read\n",
"\n",
"# Data files parameters.\n",
"karabo_da = ['HPVX01'] # data aggregators\n",
"karabo_id = \"SPB_EHD_
HPVX2_2
\" # karabo prefix of Shimadzu HPV-X2 devices\n",
"karabo_da = ['HPVX01
/1', 'HPVX01/2
'] # data aggregators\n",
"karabo_id = \"SPB_EHD_
MIC
\" # karabo prefix of Shimadzu HPV-X2 devices\n",
"\n",
"#receiver_id = \"PNCCD_FMT-0\" # inset for receiver devices\n",
"#path_template = 'RAW-R{:04d}-{}-S{{:05d}}.h5' # the template to use to access data\n",
"instrument_source_template = '{}/CAM/CAMERA:daqOutput' # data source path in h5file. Template filled with karabo_id and receiver_id\n",
"instrument_source_template = 'SPB_EHD_MIC/CAM/HPVX2_{module}:daqOutput' # data source path in h5file.\n",
"#instrument_source_template = 'SPB_EHD_HPVX2_{module}/CAM/CAMERA:daqOutput'\n",
"image_key = \"data.image.pixels\" # image data key in Karabo or exdf notation\n",
"\n",
"db_module_template = \"Shimadzu_HPVX2_{}\"\n",
"\n",
"# Database access parameters.\n",
"use_dir_creation_date = True # use dir creation date as data production reference date\n",
"cal_db_interface = \"tcp://max-exfl-cal001:8021\" # calibration DB interface to use\n",
...
...
@@ -40,7 +43,7 @@
"local_output = True # if True, the notebook saves dark constants locally\n",
"creation_time = \"\" # To overwrite the measured creation_time. Required Format: YYYY-MM-DD HR:MN:SC.00 e.g. 2019-07-04 11:02:41.00\n",
"\n",
"n_components = 50"
"n_components = 50
# Number of principal components to compute
"
]
},
{
...
...
@@ -58,6 +61,7 @@
"import time\n",
"import numpy as np\n",
"import matplotlib.pyplot as plt\n",
"from IPython.display import display, Markdown\n",
"\n",
"from extra_data import RunDirectory\n",
"\n",
...
...
@@ -99,14 +103,29 @@
"report = get_report(metadata_folder)\n",
"cal_db_interface = get_random_db_interface(cal_db_interface)\n",
"print(f'Calibration database interface: {cal_db_interface}')\n",
"\n",
"instrument = karabo_id.split(\"_\")[0]\n",
"source = instrument_source_template.format(karabo_id)\n",
"print()\n",
"\n",
"instrument, part, component = karabo_id.split('_')\n",
"\n",
"sources = {}\n",
"source_to_db = {}\n",
"print(\"Sources:\")\n",
"for da in karabo_da:\n",
" aggr, _, module = da.partition('/')\n",
" source_name = instrument_source_template.format(\n",
" instrument=instrument, part=part, component=component,\n",
" module=module\n",
" )\n",
" sources[source_name] = aggr\n",
" source_to_db[source_name] = db_module_template.format(module)\n",
" print('-', source_name)\n",
"print()\n",
"\n",
"print(f\"Detector in use is {karabo_id}\")\n",
"print(f\"Instrument {instrument}\")\n",
"\n",
"step_timer = StepTimer()"
"step_timer = StepTimer()\n",
"constants = {}"
]
},
{
...
...
@@ -122,41 +141,36 @@
"metadata": {},
"outputs": [],
"source": [
"step_timer.start()\n",
"\n",
"dark_dc = RunDirectory(f\"{in_folder}/r{dark_run:04d}\")\n",
"dark_dc = dark_dc.select([(source, image_key)])\n",
"key_data = dark_dc[source][image_key]\n",
"\n",
"images_dark = key_data.ndarray()\n",
"ntrain, npulse, ny, nx = images_dark.shape\n",
"\n",
"print(f\"N image: {ntrain * npulse} (ntrain: {ntrain}, npulse: {npulse})\")\n",
"print(f\"Image size: {ny} x {nx} px\")\n",
"step_timer.done_step(\"Read dark images\")"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"step_timer.start()\n",
"dark = dffc.process_dark(images_dark)\n",
"step_timer.done_step(\"Process dark images\")"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"step_timer.start()\n",
"plot_camera_image(dark)\n",
"plt.show()\n",
"step_timer.done_step(\"Draw offset map\")"
"for source, aggr in sources.items():\n",
" display(Markdown(f\"## {source}\"))\n",
"\n",
" # read\n",
" step_timer.start()\n",
" dark_dc = RunDirectory(f\"{in_folder}/r{dark_run:04d}\",\n",
" include=f\"RAW-R{dark_run:04d}-{aggr}-S*.h5\")\n",
" dark_dc = dark_dc.select([(source, image_key)])\n",
" key_data = dark_dc[source][image_key]\n",
"\n",
" images_dark = key_data.ndarray()\n",
" ntrain, npulse, ny, nx = images_dark.shape\n",
"\n",
" print(f\"N image: {ntrain * npulse} (ntrain: {ntrain}, npulse: {npulse})\")\n",
" print(f\"Image size: {ny} x {nx} px\")\n",
" step_timer.done_step(\"Read dark images\")\n",
"\n",
" # process\n",
" step_timer.start()\n",
" dark = dffc.process_dark(images_dark)\n",
" module_constants = constants.setdefault(source, {})\n",
" module_constants[\"Offset\"] = dark\n",
" step_timer.done_step(\"Process dark images\")\n",
" display()\n",
"\n",
" # draw plots\n",
" step_timer.start()\n",
" plot_camera_image(dark)\n",
" plt.show()\n",
" step_timer.done_step(\"Draw offsets\")"
]
},
{
...
...
@@ -169,93 +183,56 @@
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"step_timer.start()\n",
"\n",
"flat_dc = RunDirectory(f\"{in_folder}/r{flat_run:04d}\")\n",
"flat_dc = flat_dc.select([(source, image_key)])\n",
"key_data = flat_dc[source][image_key]\n",
"\n",
"images_flat = key_data.ndarray()\n",
"ntrain, npulse, ny, nx = images_flat.shape\n",
"\n",
"print(f\"N image: {ntrain * npulse} (ntrain: {ntrain}, npulse: {npulse})\")\n",
"print(f\"Image size: {ny} x {nx} px\")\n",
"step_timer.done_step(\"Read flat-field images\")\n",
"\n",
"tm0 = time.monotonic()\n",
"tm_cm = time.monotonic() - tm0\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"step_timer.start()\n",
"flat, components, explained_variance_ratio = dffc.process_flat(\n",
" images_flat, dark, n_components)\n",
"step_timer.done_step(\"Process flat-field images\")"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Average flat-field"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"step_timer.start()\n",
"plot_camera_image(flat)\n",
"plt.show()"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Explained variance ratio"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"fig, ax = plt.subplots(1, 1, figsize=(10,4), tight_layout=True)\n",
"ax.semilogy(explained_variance_ratio, 'o')\n",
"ax.set_xticks(np.arange(len(explained_variance_ratio)))\n",
"ax.set_xlabel(\"Component no.\")\n",
"ax.set_ylabel(\"Variance fraction\")\n",
"plt.show()"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# The first principal components (up to 20)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"metadata": {
"scrolled": false
},
"outputs": [],
"source": [
"plot_images(components[:20], figsize=(13, 8))\n",
"plt.show()\n",
"step_timer.done_step(\"Draw flat-field map and components\")"
"for source, aggr in sources.items():\n",
" display(Markdown(f\"## {source}\"))\n",
"\n",
" # read\n",
" step_timer.start()\n",
" flat_dc = RunDirectory(f\"{in_folder}/r{flat_run:04d}\",\n",
" include=f\"RAW-R{flat_run:04d}-{aggr}-S*.h5\")\n",
" flat_dc = flat_dc.select([(source, image_key)])\n",
" key_data = flat_dc[source][image_key]\n",
"\n",
" images_flat = key_data.ndarray()\n",
" ntrain, npulse, ny, nx = images_flat.shape\n",
"\n",
" print(f\"N image: {ntrain * npulse} (ntrain: {ntrain}, npulse: {npulse})\")\n",
" print(f\"Image size: {ny} x {nx} px\")\n",
" step_timer.done_step(\"Read flat-field images\")\n",
"\n",
" # process\n",
" step_timer.start()\n",
" flat, components, explained_variance_ratio = dffc.process_flat(\n",
" images_flat, dark, n_components)\n",
"\n",
" module_constants = constants.setdefault(source, {})\n",
" module_constants[\"DynamicFF\"] = np.concatenate([flat[None, ...], components])\n",
" step_timer.done_step(\"Process flat-field images\")\n",
"\n",
" # draw plots\n",
" step_timer.start()\n",
" display(Markdown(\"### Average flat-field\"))\n",
" plot_camera_image(flat)\n",
" plt.show()\n",
"\n",
" display(Markdown(\"### Explained variance ratio\"))\n",
" fig, ax = plt.subplots(1, 1, figsize=(10,4), tight_layout=True)\n",
" ax.semilogy(explained_variance_ratio, 'o')\n",
" ax.set_xticks(np.arange(len(explained_variance_ratio)))\n",
" ax.set_xlabel(\"Component no.\")\n",
" ax.set_ylabel(\"Variance fraction\")\n",
" plt.show()\n",
"\n",
" display(Markdown(\"### The first principal components (up to 20)\"))\n",
" plot_images(components[:20], figsize=(13, 8))\n",
" plt.show()\n",
"\n",
" step_timer.done_step(\"Draw flat-field\")"
]
},
{
...
...
@@ -276,45 +253,29 @@
"# Output Folder Creation:\n",
"os.makedirs(out_folder, exist_ok=True)\n",
"\n",
"db_module = \"SHIMADZU_HPVX2_M001\"\n",
"\n",
"constant_name = \"Offset\"\n",
"\n",
"conditions = {\n",
" 'Memory cells': {'value': 128},\n",
" 'Pixels X': {'value': flat.shape[1]},\n",
" 'Pixels Y': {'value': flat.shape[0]},\n",
" 'FF components': {'value': components.shape[0]}\n",
"}\n",
"\n",
"data_to_store = {\n",
" 'condition': conditions,\n",
" 'db_module': db_module,\n",
" 'karabo_id': karabo_id,\n",
" 'constant': constant_name,\n",
" 'data': dark,\n",
" 'creation_time': creation_time.replace(microsecond=0),\n",
" 'file_loc': file_loc,\n",
" 'report': report,\n",
"}\n",
"\n",
"ofile = f\"{out_folder}/const_{constant_name}_{db_module}.h5\"\n",
"if os.path.isfile(ofile):\n",
" print(f'File {ofile} already exists and will be overwritten')\n",
"save_dict_to_hdf5(data_to_store, ofile)\n",
"\n",
"\n",
"constant_name = \"ComponentsFF\"\n",
"\n",
"data_to_store.update({\n",
" 'constant': constant_name,\n",
" 'data': np.concatenate([flat[None, ...], components]),\n",
"})\n",
"\n",
"ofile = f\"{out_folder}/const_{constant_name}_{db_module}.h5\"\n",
"if os.path.isfile(ofile):\n",
" print(f'File {ofile} already exists and will be overwritten')\n",
"save_dict_to_hdf5(data_to_store, ofile)\n",
"for source, module_constants in constants.items():\n",
" for constant_name, data in module_constants.items():\n",
" db_module = source_to_db[source]\n",
"\n",
" conditions = {\n",
" 'Frame Size': {'value': 1.0},\n",
" }\n",
"\n",
" data_to_store = {\n",
" 'condition': conditions,\n",
" 'db_module': db_module,\n",
" 'karabo_id': karabo_id,\n",
" 'constant': constant_name,\n",
" 'data': data,\n",
" 'creation_time': creation_time.replace(microsecond=0),\n",
" 'file_loc': file_loc,\n",
" 'report': report,\n",
" }\n",
"\n",
" ofile = f\"{out_folder}/const_{constant_name}_{db_module}.h5\"\n",
" if os.path.isfile(ofile):\n",
" print(f'File {ofile} already exists and will be overwritten')\n",
" save_dict_to_hdf5(data_to_store, ofile)\n",
"\n",
"step_timer.done_step(\"Storing calibration constants\")"
]
...
...
%% Cell type:markdown id: tags:
# Shimadzu HPVX2 Characterization of dark and flat field
# Characterization of dark and flat field for Dynamic Flat Field correction

Author: Egor Sobolev
Computation of dark offsets and flat-field principal components
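The actual computation is delegated to `dffc.process_dark` and `dffc.process_flat` in later cells. As a rough illustration of what "dark offsets and flat-field principal components" means here (an assumption-laden sketch, not the `dffc` implementation), the offset can be taken as the per-pixel mean of the dark frames and the components as a PCA of the dark-subtracted flat frames:

``` python
import numpy as np

def characterize(darks, flats, n_components=20):
    """Schematic sketch only; not the dffc algorithm used by this notebook.

    darks, flats: stacks of frames with shape (n_frames, ny, nx).
    """
    offset = darks.mean(axis=0)                 # per-pixel dark offset
    corrected = flats - offset                  # dark-subtracted flat frames
    flat_mean = corrected.mean(axis=0)          # average flat-field
    # principal components of the centred, flattened flat-field stack
    centred = (corrected - flat_mean).reshape(len(flats), -1)
    _, s, vt = np.linalg.svd(centred, full_matrices=False)
    components = vt[:n_components].reshape(-1, *flat_mean.shape)
    explained_variance_ratio = (s**2 / (s**2).sum())[:n_components]
    return offset, flat_mean, components, explained_variance_ratio
```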
%% Cell type:code id: tags:
``` python
in_folder = "/gpfs/exfel/exp/SPB/202121/p002919/raw/" # input folder, required
in_folder = "/gpfs/exfel/exp/SPB/202430/p900425/raw" # input folder, required
out_folder = '/gpfs/exfel/data/scratch/esobolev/test/shimadzu' # output folder, required
metadata_folder = "" # Directory containing calibration_metadata.yml when run by xfel-calibrate
dark_run = 59 # which run to read data from, required
flat_run = 40 # which run to read
dark_run = 1 # which run to read data from, required
flat_run = 2 # which run to read

# Data files parameters.
karabo_da = ['HPVX01'] # data aggregators
karabo_id = "SPB_EHD_HPVX2_2" # karabo prefix of Shimadzu HPV-X2 devices
karabo_da = ['HPVX01/1', 'HPVX01/2'] # data aggregators
karabo_id = "SPB_EHD_MIC" # karabo prefix of Shimadzu HPV-X2 devices

#receiver_id = "PNCCD_FMT-0" # inset for receiver devices
#path_template = 'RAW-R{:04d}-{}-S{{:05d}}.h5' # the template to use to access data
instrument_source_template = '{}/CAM/CAMERA:daqOutput' # data source path in h5file. Template filled with karabo_id and receiver_id
instrument_source_template = 'SPB_EHD_MIC/CAM/HPVX2_{module}:daqOutput' # data source path in h5file.
#instrument_source_template = 'SPB_EHD_HPVX2_{module}/CAM/CAMERA:daqOutput'
image_key = "data.image.pixels" # image data key in Karabo or exdf notation

db_module_template = "Shimadzu_HPVX2_{}"

# Database access parameters.
use_dir_creation_date = True # use dir creation date as data production reference date
cal_db_interface = "tcp://max-exfl-cal001:8021" # calibration DB interface to use
cal_db_timeout = 300000 # timeout on caldb requests
db_output = False # if True, the notebook sends dark constants to the calibration database
local_output = True # if True, the notebook saves dark constants locally
creation_time = "" # To overwrite the measured creation_time. Required Format: YYYY-MM-DD HR:MN:SC.00 e.g. 2019-07-04 11:02:41.00

n_components = 50
n_components = 50 # Number of principal components to compute
```
%% Cell type:code id: tags:
``` python
import datetime
import os
import warnings
warnings.filterwarnings('ignore')

import time
import numpy as np
import matplotlib.pyplot as plt
from IPython.display import display, Markdown

from extra_data import RunDirectory

%matplotlib inline
from cal_tools.step_timing import StepTimer
from cal_tools.tools import (
    get_dir_creation_date,
    get_pdu_from_db,
    get_random_db_interface,
    get_report,
    save_const_to_h5,
    save_dict_to_hdf5,
    send_to_db,
    run_prop_seq_from_path,
)

import dffc
from dffc.draw import plot_images, plot_camera_image
```
%% Cell type:code id: tags:
``` python
creation_time = None
if use_dir_creation_date:
    creation_time = get_dir_creation_date(in_folder, max(dark_run, flat_run))
    print(f"Using {creation_time} as creation time of constant.")

run, prop, seq = run_prop_seq_from_path(in_folder)
file_loc = f'proposal: {prop}, runs: {dark_run} {flat_run}'
# Read report path and create file location tuple to add with the injection
file_loc = f"proposal: {prop} runs: {dark_run} {flat_run}"

report = get_report(metadata_folder)
cal_db_interface = get_random_db_interface(cal_db_interface)
print(f'Calibration database interface: {cal_db_interface}')
print()

instrument, part, component = karabo_id.split('_')
instrument = karabo_id.split("_")[0]
source = instrument_source_template.format(karabo_id)

sources = {}
source_to_db = {}
print("Sources:")
for da in karabo_da:
    aggr, _, module = da.partition('/')
    source_name = instrument_source_template.format(
        instrument=instrument, part=part, component=component,
        module=module
    )
    sources[source_name] = aggr
    source_to_db[source_name] = db_module_template.format(module)
    print('-', source_name)
print()

print(f"Detector in use is {karabo_id}")
print(f"Instrument {instrument}")

step_timer = StepTimer()
constants = {}
```
%% Cell type:markdown id: tags:
# Offset map
%% Cell type:code id: tags:
``` python
step_timer.start()

dark_dc = RunDirectory(f"{in_folder}/r{dark_run:04d}")
dark_dc = dark_dc.select([(source, image_key)])
key_data = dark_dc[source][image_key]

images_dark = key_data.ndarray()
ntrain, npulse, ny, nx = images_dark.shape

print(f"N image: {ntrain * npulse} (ntrain: {ntrain}, npulse: {npulse})")
print(f"Image size: {ny} x {nx} px")
step_timer.done_step("Read dark images")
```

%% Cell type:code id: tags:
``` python
step_timer.start()
dark = dffc.process_dark(images_dark)
step_timer.done_step("Process dark images")
```

%% Cell type:code id: tags:
``` python
step_timer.start()
plot_camera_image(dark)
plt.show()
step_timer.done_step("Draw offset map")
```

%% Cell type:code id: tags:
``` python
for source, aggr in sources.items():
    display(Markdown(f"## {source}"))

    # read
    step_timer.start()
    dark_dc = RunDirectory(f"{in_folder}/r{dark_run:04d}",
                           include=f"RAW-R{dark_run:04d}-{aggr}-S*.h5")
    dark_dc = dark_dc.select([(source, image_key)])
    key_data = dark_dc[source][image_key]

    images_dark = key_data.ndarray()
    ntrain, npulse, ny, nx = images_dark.shape

    print(f"N image: {ntrain * npulse} (ntrain: {ntrain}, npulse: {npulse})")
    print(f"Image size: {ny} x {nx} px")
    step_timer.done_step("Read dark images")

    # process
    step_timer.start()
    dark = dffc.process_dark(images_dark)
    module_constants = constants.setdefault(source, {})
    module_constants["Offset"] = dark
    step_timer.done_step("Process dark images")
    display()

    # draw plots
    step_timer.start()
    plot_camera_image(dark)
    plt.show()
    step_timer.done_step("Draw offsets")
```
%% Cell type:markdown id: tags:
# Flat-field PCA decomposition
%% Cell type:code id: tags:
``` python
step_timer.start()

flat_dc = RunDirectory(f"{in_folder}/r{flat_run:04d}")
flat_dc = flat_dc.select([(source, image_key)])
key_data = flat_dc[source][image_key]

images_flat = key_data.ndarray()
ntrain, npulse, ny, nx = images_flat.shape

print(f"N image: {ntrain * npulse} (ntrain: {ntrain}, npulse: {npulse})")
print(f"Image size: {ny} x {nx} px")
step_timer.done_step("Read flat-field images")

tm0 = time.monotonic()
tm_cm = time.monotonic() - tm0
```

%% Cell type:code id: tags:
``` python
step_timer.start()
flat, components, explained_variance_ratio = dffc.process_flat(
    images_flat, dark, n_components)
step_timer.done_step("Process flat-field images")
```

%% Cell type:markdown id: tags:
## Average flat-field

%% Cell type:code id: tags:
``` python
step_timer.start()
plot_camera_image(flat)
plt.show()
```

%% Cell type:markdown id: tags:
## Explained variance ratio

%% Cell type:code id: tags:
``` python
fig, ax = plt.subplots(1, 1, figsize=(10,4), tight_layout=True)
ax.semilogy(explained_variance_ratio, 'o')
ax.set_xticks(np.arange(len(explained_variance_ratio)))
ax.set_xlabel("Component no.")
ax.set_ylabel("Variance fraction")
plt.show()
```

%% Cell type:markdown id: tags:
# The first principal components (up to 20)

%% Cell type:code id: tags:
``` python
plot_images(components[:20], figsize=(13, 8))
plt.show()
step_timer.done_step("Draw flat-field map and components")
```

%% Cell type:code id: tags:
``` python
for source, aggr in sources.items():
    display(Markdown(f"## {source}"))

    # read
    step_timer.start()
    flat_dc = RunDirectory(f"{in_folder}/r{flat_run:04d}",
                           include=f"RAW-R{flat_run:04d}-{aggr}-S*.h5")
    flat_dc = flat_dc.select([(source, image_key)])
    key_data = flat_dc[source][image_key]

    images_flat = key_data.ndarray()
    ntrain, npulse, ny, nx = images_flat.shape

    print(f"N image: {ntrain * npulse} (ntrain: {ntrain}, npulse: {npulse})")
    print(f"Image size: {ny} x {nx} px")
    step_timer.done_step("Read flat-field images")

    # process
    step_timer.start()
    flat, components, explained_variance_ratio = dffc.process_flat(
        images_flat, dark, n_components)

    module_constants = constants.setdefault(source, {})
    module_constants["DynamicFF"] = np.concatenate([flat[None, ...], components])
    step_timer.done_step("Process flat-field images")

    # draw plots
    step_timer.start()
    display(Markdown("### Average flat-field"))
    plot_camera_image(flat)
    plt.show()

    display(Markdown("### Explained variance ratio"))
    fig, ax = plt.subplots(1, 1, figsize=(10,4), tight_layout=True)
    ax.semilogy(explained_variance_ratio, 'o')
    ax.set_xticks(np.arange(len(explained_variance_ratio)))
    ax.set_xlabel("Component no.")
    ax.set_ylabel("Variance fraction")
    plt.show()

    display(Markdown("### The first principal components (up to 20)"))
    plot_images(components[:20], figsize=(13, 8))
    plt.show()

    step_timer.done_step("Draw flat-field")
```
%% Cell type:markdown id: tags:
## Calibration constants
%% Cell type:code id: tags:
``` python
step_timer.start()

# Output Folder Creation:
os.makedirs(out_folder, exist_ok=True)

db_module = "SHIMADZU_HPVX2_M001"

constant_name = "Offset"

conditions = {
    'Memory cells': {'value': 128},
    'Pixels X': {'value': flat.shape[1]},
    'Pixels Y': {'value': flat.shape[0]},
    'FF components': {'value': components.shape[0]}
}

data_to_store = {
    'condition': conditions,
    'db_module': db_module,
    'karabo_id': karabo_id,
    'constant': constant_name,
    'data': dark,
    'creation_time': creation_time.replace(microsecond=0),
    'file_loc': file_loc,
    'report': report,
}

ofile = f"{out_folder}/const_{constant_name}_{db_module}.h5"
if os.path.isfile(ofile):
    print(f'File {ofile} already exists and will be overwritten')
save_dict_to_hdf5(data_to_store, ofile)


constant_name = "ComponentsFF"

data_to_store.update({
    'constant': constant_name,
    'data': np.concatenate([flat[None, ...], components]),
})

ofile = f"{out_folder}/const_{constant_name}_{db_module}.h5"
if os.path.isfile(ofile):
    print(f'File {ofile} already exists and will be overwritten')
save_dict_to_hdf5(data_to_store, ofile)

for source, module_constants in constants.items():
    for constant_name, data in module_constants.items():
        db_module = source_to_db[source]

        conditions = {
            'Frame Size': {'value': 1.0},
        }

        data_to_store = {
            'condition': conditions,
            'db_module': db_module,
            'karabo_id': karabo_id,
            'constant': constant_name,
            'data': data,
            'creation_time': creation_time.replace(microsecond=0),
            'file_loc': file_loc,
            'report': report,
        }

        ofile = f"{out_folder}/const_{constant_name}_{db_module}.h5"
        if os.path.isfile(ofile):
            print(f'File {ofile} already exists and will be overwritten')
        save_dict_to_hdf5(data_to_store, ofile)

step_timer.done_step("Storing calibration constants")
```
%% Cell type:code id: tags:
``` python
print(f"Total processing time {step_timer.timespan():.01f} s")
step_timer.print_summary()
```
...
...
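For downstream use, note that the stored "DynamicFF" constant stacks the average flat-field and the principal components into one array via `np.concatenate([flat[None, ...], components])`. A minimal sketch, assuming only that layout, of how a consumer could split the constant back apart (shapes are illustrative, not from the commit):

``` python
import numpy as np

# Illustrative shapes only; the real constant is read from the stored .h5 file.
ny, nx, n_components = 256, 400, 50
dynamic_ff = np.random.rand(1 + n_components, ny, nx)

flat = dynamic_ff[0]          # average flat-field (first slice)
components = dynamic_ff[1:]   # principal components (remaining slices)
assert components.shape == (n_components, ny, nx)
```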