SCS / ToolBox, commit dbb705b5
Authored 5 years ago by Laurent Mercadier
Parent: ca508756
Merge request: !66 (Bunch pattern)

Generalized extractSaseBunchPattern() and renamed it to extractBunchPattern()

Showing 1 changed file: Load.py (+58 additions, -39 deletions)
@@ -375,55 +375,71 @@ mnemonics = {
                           'dim': ['gott_pId', 'pixelId']}
              }
 
-def extractSaseBunchPattern(runDir, sase=3):
-    ''' generate the "saseX" and "npulse_saseX" arrays directly from the bunch pattern
-        table and not using the MDL device BUNCH_DECODER. This is inspired from the
-        euxfel_bunch_pattern project,
-        https://git.xfel.eu/gitlab/karaboDevices/euxfel_bunch_pattern.git
+def extractBunchPattern(bp_table=None, key='sase3', runDir=None):
+    ''' generate the bunch pattern and number of pulses of a source directly from the
+        bunch pattern table and not using the MDL device BUNCH_DECODER. This is
+        inspired by the euxfel_bunch_pattern package,
+        https://git.xfel.eu/gitlab/karaboDevices/euxfel_bunch_pattern
         Inputs:
-            runDir: run directory obtained by karabo_data.runDirectory()
-            sase: int, sase number between 1 and 3
+            bp_table: DataArray corresponding to the mnemonics "bunchPatternTable".
+                If None, the bunch pattern table is loaded using runDir.
+            key: str, ['sase1', 'sase2', 'sase3', 'scs_ppl']
+            runDir: karabo_data run directory. Required only if bp_table is None.
         Outputs:
-            sase: DataArray containing indices of the sase pulses for each train
-            npulses_sase: DataArray containing the number of pulses for each train
+            bunchPattern: DataArray containing indices of the sase/laser pulses for
+                each train
+            npulses: DataArray containing the number of pulses for each train
     '''
-    if not (1 <= sase <= 3):
-        raise ValueError("Invalid SASE value {!r}, expected 1-3")
-    # define relevant masks, see euxfel_bunch_pattern project for details
+    keys = ['sase1', 'sase2', 'sase3', 'scs_ppl']
+    if key not in keys:
+        raise ValueError(f'Invalid key "{key}", possible values are {keys}')
+    if bp_table is None:
+        if runDir is None:
+            raise ValueError('bp_table and runDir cannot both be None')
+        bp_mnemo = mnemonics['bunchPatternTable']
+        if bp_mnemo['source'] not in runDir.all_sources:
+            raise ValueError('Source {} not found in run'.format(
+                mnemonics['bunchPatternTable']['source']))
+        else:
+            bp_table = runDir.get_array(bp_mnemo['source'], bp_mnemo['key'],
+                                        extra_dims=bp_mnemo['dim'])
+    # define relevant masks, see euxfel_bunch_pattern package for details
     DESTINATION_MASK = 0xf << 18
     DESTINATION_T4D = 4 << 18          # SASE1/3 dump
     DESTINATION_T5D = 2 << 18          # SASE2 dump
     PHOTON_LINE_DEFLECTION = 1 << 27   # Soft kick (e.g. SA3)
-    bp_mnemo = mnemonics['bunchPatternTable']
-    bp_table = runDir.get_array(bp_mnemo['source'], bp_mnemo['key'],
-                                extra_dims=bp_mnemo['dim'])
-    destination = DESTINATION_T5D if (sase == 2) else DESTINATION_T4D
-    matched = (bp_table & DESTINATION_MASK) == destination
-    if sase == 1:
-        # Pulses to SASE 1 when soft kick is off
-        matched &= (bp_table & PHOTON_LINE_DEFLECTION) == 0
-    elif sase == 3:
-        # Pulses to SASE 3 when soft kick is on
-        matched &= (bp_table & PHOTON_LINE_DEFLECTION) != 0
+    LASER_SEED6 = 1 << 13
+    if 'sase' in key:
+        sase = int(key[4])
+        destination = DESTINATION_T5D if (sase == 2) else DESTINATION_T4D
+        matched = (bp_table & DESTINATION_MASK) == destination
+        if sase == 1:
+            # Pulses to SASE 1 when soft kick is off
+            matched &= (bp_table & PHOTON_LINE_DEFLECTION) == 0
+        elif sase == 3:
+            # Pulses to SASE 3 when soft kick is on
+            matched &= (bp_table & PHOTON_LINE_DEFLECTION) != 0
+    elif key == 'scs_ppl':
+        matched = (bp_table & LASER_SEED6) != 0
     # create table of indices where bunch pattern and mask match
     nz = np.nonzero(matched.values)
     dim_pId = matched.shape[1]
-    sase_array = np.ones(matched.shape, dtype=np.uint64) * dim_pId
-    sase_array[nz] = nz[1]
-    sase_array = np.sort(sase_array)
-    sase_array[sase_array == dim_pId] = 0
-    sase_da = xr.DataArray(sase_array[:, :1000], dims=['trainId', 'bunchId'],
-                           coords={'trainId': matched.trainId},
-                           name=f'sase{sase}')
-    npulses_sase = xr.DataArray(np.count_nonzero(sase_da, axis=1), dims=['trainId'],
-                                coords={'trainId': matched.trainId},
-                                name=f'npulses_sase{sase}')
-    return sase_da, npulses_sase
+    bunchPattern = np.ones(matched.shape, dtype=np.uint64) * dim_pId
+    bunchPattern[nz] = nz[1]
+    bunchPattern = np.sort(bunchPattern)
+    npulses = np.count_nonzero(bunchPattern < dim_pId, axis=1)
+    bunchPattern[bunchPattern == dim_pId] = 0
+    bunchPattern = xr.DataArray(bunchPattern[:, :1000], dims=['trainId', 'bunchId'],
+                                coords={'trainId': matched.trainId},
+                                name=key)
+    npulses = xr.DataArray(npulses, dims=['trainId'],
+                           coords={'trainId': matched.trainId},
+                           name=f'npulses_{key}')
+    return bunchPattern, npulses
 
 def load(fields, runNB, proposalNB, subFolder='raw', display=False, validate=False,
          subset=by_index[:], rois={}, useBPTable=True):
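Not part of the commit: a minimal sketch of the bitmask selection and the index trick used above, run on a tiny synthetic table. The array below is a stand-in for the real bunchPatternTable (one integer word per pulse slot and train); all names in the sketch are local to the example.

import numpy as np
import xarray as xr

DESTINATION_MASK = 0xf << 18
DESTINATION_T4D = 4 << 18          # SASE1/3 dump
PHOTON_LINE_DEFLECTION = 1 << 27   # soft kick, i.e. SASE3

# Synthetic table: 2 trains x 6 pulse slots, a few slots carry SASE3 pulses
# (T4D destination with the soft-kick bit set), the rest are empty.
sase3_word = DESTINATION_T4D | PHOTON_LINE_DEFLECTION
table = np.zeros((2, 6), dtype=np.uint64)
table[0, [0, 2, 4]] = sase3_word
table[1, [1, 3]] = sase3_word
bp_table = xr.DataArray(table, dims=['trainId', 'pulse_slot'],
                        coords={'trainId': [1000, 1001]})

# Same selection as the 'sase3' branch of extractBunchPattern()
matched = ((bp_table & DESTINATION_MASK) == DESTINATION_T4D) & \
          ((bp_table & PHOTON_LINE_DEFLECTION) != 0)

# Same index trick: fill with dim_pId, write the matching column indices,
# sort so the real indices come first, count them, then zero the padding.
nz = np.nonzero(matched.values)
dim_pId = matched.shape[1]
pattern = np.ones(matched.shape, dtype=np.uint64) * dim_pId
pattern[nz] = nz[1]
pattern = np.sort(pattern)
npulses = np.count_nonzero(pattern < dim_pId, axis=1)
pattern[pattern == dim_pId] = 0

print(pattern)   # train 1000 -> slots 0, 2, 4 then zero padding; train 1001 -> slots 1, 3
print(npulses)   # [3 2]

After the padding value dim_pId is reset to 0, a real pulse at slot 0 is indistinguishable from padding by value alone, which is why the pulse count is taken before the reset and returned alongside the index array.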
@@ -469,14 +485,17 @@ def load(fields, runNB, proposalNB, subFolder='raw', display=False, validate=Fal
     keys = []
     vals = []
 
-    # load pulse pattern infos
+    # load pulse pattern info
     if useBPTable:
-        if mnemonics['bunchPatternTable']['source'] not in run.all_sources:
+        bp_mnemo = mnemonics['bunchPatternTable']
+        if bp_mnemo['source'] not in run.all_sources:
             print('Source {} not found in run. Skipping!'.format(
                 mnemonics['bunchPatternTable']['source']))
         else:
-            sase1, npulses_sase1 = extractSaseBunchPattern(run, 1)
-            sase3, npulses_sase3 = extractSaseBunchPattern(run, 3)
+            bp_table = run.get_array(bp_mnemo['source'], bp_mnemo['key'],
+                                     extra_dims=bp_mnemo['dim'])
+            sase1, npulses_sase1 = extractBunchPattern(bp_table, 'sase1')
+            sase3, npulses_sase3 = extractBunchPattern(bp_table, 'sase3')
             keys += ["sase1", "npulses_sase1", "sase3", "npulses_sase3"]
             vals += [sase1, npulses_sase1, sase3, npulses_sase3]
     else:
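Below is a hypothetical usage sketch, not part of the commit, mirroring the new call pattern in load(). The import lines and the run path are placeholders; only extractBunchPattern()'s new signature and the run.get_array() call are taken from the diff above.

from karabo_data import RunDirectory
from Load import mnemonics, extractBunchPattern   # placeholder import of the module shown above

run = RunDirectory('/path/to/proposal/raw/r0001')  # placeholder run directory

# Load the bunch pattern table once and reuse it for several sources,
# as load() now does:
bp_mnemo = mnemonics['bunchPatternTable']
bp_table = run.get_array(bp_mnemo['source'], bp_mnemo['key'],
                         extra_dims=bp_mnemo['dim'])
sase3, npulses_sase3 = extractBunchPattern(bp_table, 'sase3')
ppl, npulses_ppl = extractBunchPattern(bp_table, 'scs_ppl')

# Or let extractBunchPattern() load the table itself from the run directory:
sase1, npulses_sase1 = extractBunchPattern(key='sase1', runDir=run)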