Commit 919e26f9 authored by Philipp Schmidt

Minor text, comment and code formatting improvements

parent 26527b5d
This commit is part of merge request !4: Simplify reduction operation implementations and error handling
@@ -199,7 +199,8 @@ class AgipdGain(ReductionMethod):
                                'explicit gain sources')
                 return
 
-            self.log.info(f'Found {domain}')
+            self.log.info(f'No detector specified, detected {domain} '
+                          f'automatically')
             agipd_sources = f'{domain}/DET/*CH0:xtdf'
         else:
@@ -214,6 +215,8 @@ class AgipdGain(ReductionMethod):
             # Unfortunately HED uses a different domain for
             # its AGIPD control devices, hardcode it here
             # for convenience.
+            # TODO: This could also be solved via a CalCat
+            # query.
             domain = domain.replace('_DET_', '_EXP_')
 
         control_source = f'{domain}/MDL/FPGA_COMP'
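As context for the naming scheme in the hunks above, both the AGIPD fast-data sources and the control source are derived from a single Karabo domain string. The following standalone sketch illustrates that string handling; the startswith('HED_') check and the example domain are illustrative assumptions, not the method's actual detection logic.

import fnmatch

def agipd_sources_for(domain):
    # Fast-data sources of all modules follow the <domain>/DET/<module>CH0:xtdf pattern.
    agipd_sources = f'{domain}/DET/*CH0:xtdf'

    if domain.startswith('HED_'):
        # Assumed stand-in for the HED special case mentioned in the comment:
        # its control devices live under an _EXP_ domain rather than _DET_.
        domain = domain.replace('_DET_', '_EXP_')

    control_source = f'{domain}/MDL/FPGA_COMP'
    return agipd_sources, control_source

sources_glob, control = agipd_sources_for('HED_DET_AGIPD500K2G')
print(sources_glob)  # HED_DET_AGIPD500K2G/DET/*CH0:xtdf
print(control)       # HED_EXP_AGIPD500K2G/MDL/FPGA_COMP
print(fnmatch.fnmatch('HED_DET_AGIPD500K2G/DET/3CH0:xtdf', sources_glob))  # True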
...@@ -33,8 +33,8 @@ def apply_by_key(op_name): ...@@ -33,8 +33,8 @@ def apply_by_key(op_name):
assert isinstance(self, ReduceWriter) assert isinstance(self, ReduceWriter)
for source_glob, key_glob, *args in self._filter_ops(op_name): for source_glob, key_glob, *args in self._filter_ops(op_name):
for source in fnmatch.filter(self._sources, source_glob): for source in fnmatch.filter(self._sources, source_glob):
keys = self._custom_keys.get(source, keys = self._custom_keys.get(
set(self._data[source].keys())) source, set(self._data[source].keys()))
for key in fnmatch.filter(keys, key_glob): for key in fnmatch.filter(keys, key_glob):
op_func(source, key, *args) op_func(source, key, *args)
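For readers who only see this hunk: the decorator being reformatted here dispatches a queued operation to every (source, key) pair matching a pair of globs. Below is a minimal, self-contained sketch of that pattern; the DemoWriter class, its attributes and the example source name are invented for illustration and do not mirror ReduceWriter's real internals.

import fnmatch

def apply_by_key(op_name):
    """Decorator factory: run the wrapped handler once per matching
    (source, key) pair for every queued operation named op_name."""
    def op_decorator(op_func):
        def op_handler(self):
            for source_glob, key_glob, *args in self._filter_ops(op_name):
                for source in fnmatch.filter(self._sources, source_glob):
                    keys = self._custom_keys.get(
                        source, set(self._data[source].keys()))

                    for key in fnmatch.filter(keys, key_glob):
                        op_func(self, source, key, *args)
        return op_handler
    return op_decorator

class DemoWriter:
    """Toy stand-in with hardcoded sources, keys and one queued operation."""
    def __init__(self):
        self._data = {'SA1_XTD2_XGM/XGM/DOOCS': {'data.intensityTD': None}}
        self._sources = list(self._data)
        self._custom_keys = {}
        self._ops = [('rechunk-keys', '*XGM*', 'data.*', (1, -1))]

    def _filter_ops(self, op_name):
        return [op[1:] for op in self._ops if op[0] == op_name]

    @apply_by_key('rechunk-keys')
    def handle_rechunk_keys(self, source, key, chunks):
        print(f'would rechunk {source}.{key} to {chunks}')

DemoWriter().handle_rechunk_keys()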
...@@ -116,7 +116,6 @@ class ReduceWriter(SourceDataWriter): ...@@ -116,7 +116,6 @@ class ReduceWriter(SourceDataWriter):
raise ReduceInitError('Source may not be affected by both ' raise ReduceInitError('Source may not be affected by both '
'select-entries and select-xtdf operations') 'select-entries and select-xtdf operations')
if self._rechunked_keys.keys() & self._compressed_keys.keys(): if self._rechunked_keys.keys() & self._compressed_keys.keys():
raise ReduceInitError('Key may not be affected by both ' raise ReduceInitError('Key may not be affected by both '
'compress-keys and rechunk-keys') 'compress-keys and rechunk-keys')
@@ -303,10 +302,12 @@ class ReduceWriter(SourceDataWriter):
         # Keys are guaranteed to never use both custom chunking and
         # compression.
-        if (source, key) in self._rechunked_keys:
+        sourcekey = source, key
+
+        if sourcekey in self._rechunked_keys:
             orig_chunks = kwargs['chunks']
-            chunks = list(self._rechunked_keys[source, key])
+            chunks = list(self._rechunked_keys[sourcekey])
             assert len(chunks) == len(orig_chunks)
 
             for i, dim_len in enumerate(chunks):
@@ -319,14 +320,14 @@ class ReduceWriter(SourceDataWriter):
             kwargs['chunks'] = tuple(chunks)
 
-        elif (source, key) in self._compressed_keys or orig_dset.compression:
+        elif sourcekey in self._compressed_keys or orig_dset.compression:
             # TODO: Maintain more of existing properties, for now it is
             # forced to use gzip and (1, *entry) chunking.
             kwargs['chunks'] = (1,) + kwargs['shape'][1:]
             kwargs['shuffle'] = True
             kwargs['compression'] = 'gzip'
             kwargs['compression_opts'] = self._compressed_keys.setdefault(
-                (source, key), orig_dset.compression_opts)
+                sourcekey, orig_dset.compression_opts)
 
         return kwargs
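The kwargs assembled in this branch eventually feed an HDF5 dataset creation call. As a rough standalone illustration of what gzip + shuffle + (1, *entry) chunking means at the h5py level (file name, dataset path, shape and gzip level are made up, not taken from the writer):

import h5py
import numpy as np

shape = (512, 16, 512, 128)  # hypothetical (entries, *entry_shape)

with h5py.File('reduced_example.h5', 'w') as f:
    dset = f.create_dataset(
        'INSTRUMENT/example/image/data',
        shape=shape,
        dtype=np.float32,
        chunks=(1,) + shape[1:],  # one entry per chunk, as forced above
        shuffle=True,             # byte shuffle before compression
        compression='gzip',
        compression_opts=3,       # gzip level, e.g. carried over from the source dataset
    )
    dset[0] = 0.0  # writing touches exactly one compressed chunk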
...@@ -398,7 +399,8 @@ class ReduceWriter(SourceDataWriter): ...@@ -398,7 +399,8 @@ class ReduceWriter(SourceDataWriter):
@apply_by_source('select-entries') @apply_by_source('select-entries')
def _handle_select_entries(self, source, idx_group, train_sel, entry_sel): def _handle_select_entries(self, source, idx_group, train_sel, entry_sel):
if idx_group not in self._data[source].index_groups: if idx_group not in self._data[source].index_groups:
raise ReduceInitError(f'{idx_group} not index group of {source}') raise ReduceInitError(
f'{idx_group} not an index group of {source}')
self._custom_entry_masks.setdefault((source, idx_group), {}).update( self._custom_entry_masks.setdefault((source, idx_group), {}).update(
self._get_entry_masks(source, idx_group, train_sel, entry_sel)) self._get_entry_masks(source, idx_group, train_sel, entry_sel))
......
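The select-entries handler above accumulates masks keyed by (source, index group), with one entry mask per train. A rough sketch of that data structure, assuming plain boolean NumPy masks and using an invented source name, train IDs and entry counts:

import numpy as np

custom_entry_masks = {}  # {(source, index_group): {train_id: bool mask}}

def add_entry_masks(source, idx_group, counts_per_train, entry_slice):
    """Mark the selected entries per train; unselected entries are dropped."""
    masks = custom_entry_masks.setdefault((source, idx_group), {})

    for train_id, num_entries in counts_per_train.items():
        mask = np.zeros(num_entries, dtype=bool)
        mask[entry_slice] = True
        masks[train_id] = mask

# Keep only the first 100 entries per train of a hypothetical AGIPD module.
add_entry_masks('HYPO_DET_AGIPD1M-1/DET/0CH0:xtdf', 'image',
                {10001: 352, 10002: 352}, np.s_[:100])
print(custom_entry_masks['HYPO_DET_AGIPD1M-1/DET/0CH0:xtdf', 'image'][10001].sum())  # 100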